From 4f0edf1e0f05899d65b5fd8f02fbcdd30abdbb05 Mon Sep 17 00:00:00 2001
From: Kaijie Chen
Date: Fri, 12 Jan 2024 20:01:48 +0800
Subject: [PATCH] [fix](move-memtable) set idle timeout equal to load timeout

---
 be/src/cloud/config.cpp | 8 +-
 be/src/cloud/config.h | 12 +-
 be/src/common/config.cpp | 4 +-
 be/src/common/config.h | 4 +-
 be/src/common/daemon.cpp | 13 +
 be/src/common/daemon.h | 1 +
 be/src/exec/data_sink.cpp | 32 +-
 be/src/exec/data_sink.h | 8 -
 be/src/exec/exec_node.cpp | 21 +-
 be/src/exec/exec_node.h | 11 +-
 be/src/exprs/bloom_filter_func.h | 47 +-
 be/src/exprs/runtime_filter_slots.h | 16 +-
 be/src/olap/wal/wal_manager.cpp | 6 +-
 be/src/pipeline/exec/exchange_sink_buffer.cpp | 10 +-
 be/src/pipeline/exec/exchange_sink_buffer.h | 1 -
 .../pipeline/exec/exchange_sink_operator.cpp | 9 +-
 be/src/pipeline/exec/exchange_sink_operator.h | 4 +-
 .../exec/exchange_source_operator.cpp | 3 +-
 .../pipeline/exec/exchange_source_operator.h | 5 -
 be/src/pipeline/exec/hashjoin_build_sink.cpp | 24 +-
 be/src/pipeline/exec/hashjoin_build_sink.h | 6 +-
 be/src/pipeline/exec/operator.h | 21 -
 .../exec/result_file_sink_operator.cpp | 13 +-
 .../pipeline/exec/result_file_sink_operator.h | 2 -
 be/src/pipeline/exec/result_sink_operator.cpp | 3 +-
 be/src/pipeline/exec/scan_operator.cpp | 10 +-
 be/src/pipeline/exec/scan_operator.h | 4 -
 be/src/pipeline/pipeline.h | 7 -
 be/src/pipeline/pipeline_fragment_context.cpp | 5 +-
 be/src/pipeline/pipeline_fragment_context.h | 12 -
 be/src/pipeline/pipeline_task.cpp | 26 -
 be/src/pipeline/pipeline_task.h | 3 -
 be/src/pipeline/pipeline_x/operator.cpp | 4 +-
 be/src/pipeline/pipeline_x/operator.h | 7 +-
 .../pipeline_x_fragment_context.cpp | 29 +-
 .../pipeline/pipeline_x/pipeline_x_task.cpp | 5 +-
 be/src/pipeline/pipeline_x/pipeline_x_task.h | 3 +-
 be/src/runtime/buffer_control_block.cpp | 4 +-
 be/src/runtime/buffer_control_block.h | 19 +-
 be/src/runtime/exec_env.h | 6 +
 be/src/runtime/exec_env_init.cpp | 10 +-
 be/src/runtime/fragment_mgr.cpp | 1 -
 be/src/runtime/group_commit_mgr.cpp | 4 +-
 be/src/runtime/plan_fragment_executor.cpp | 59 +-
 be/src/runtime/plan_fragment_executor.h | 18 +-
 be/src/runtime/query_context.cpp | 13 +
 be/src/runtime/query_context.h | 4 +
 be/src/runtime/query_statistics.cpp | 10 +-
 be/src/runtime/query_statistics.h | 23 +-
 .../runtime/runtime_query_statistics_mgr.cpp | 148 +++
 be/src/runtime/runtime_query_statistics_mgr.h | 58 +
 be/src/service/internal_service.cpp | 4 +-
 be/src/util/simd/vstring_function.h | 57 +-
 .../aggregate_function_null.h | 30 +-
 be/src/vec/columns/column.h | 5 +-
 be/src/vec/columns/column_array.cpp | 15 +-
 be/src/vec/columns/column_const.h | 4 +-
 be/src/vec/columns/column_decimal.cpp | 4 +-
 be/src/vec/columns/column_dummy.h | 2 -
 be/src/vec/columns/column_map.cpp | 6 +
 be/src/vec/columns/column_map.h | 2 +
 be/src/vec/columns/column_nullable.cpp | 29 +-
 be/src/vec/columns/column_nullable.h | 3 +-
 be/src/vec/columns/column_object.cpp | 2 +-
 be/src/vec/columns/column_string.cpp | 4 +-
 .../vec/common/hash_table/hash_map_context.h | 11 +-
 be/src/vec/common/memcpy_small.h | 9 +-
 be/src/vec/common/pod_array.h | 8 +
 be/src/vec/common/schema_util.cpp | 43 +-
 be/src/vec/common/sort/partition_sorter.cpp | 2 +-
 be/src/vec/common/sort/sorter.cpp | 6 +-
 be/src/vec/common/sort/sorter.h | 2 +-
 be/src/vec/common/typeid_cast.h | 9 +-
 be/src/vec/core/block.cpp | 34 +-
 be/src/vec/core/block.h | 5 +-
 be/src/vec/core/column_with_type_and_name.cpp | 25 +
 be/src/vec/core/column_with_type_and_name.h | 13 +-
 be/src/vec/core/sort_cursor.h | 10 +-
be/src/vec/data_types/data_type.h | 4 - be/src/vec/data_types/data_type_nullable.cpp | 4 - be/src/vec/data_types/data_type_nullable.h | 1 - be/src/vec/data_types/get_least_supertype.cpp | 4 +- be/src/vec/exec/join/vhash_join_node.cpp | 22 +- be/src/vec/exec/join/vhash_join_node.h | 52 +- be/src/vec/exec/scan/new_olap_scan_node.cpp | 15 - be/src/vec/exec/scan/new_olap_scan_node.h | 2 - be/src/vec/exec/scan/vscan_node.cpp | 3 + be/src/vec/exec/scan/vscanner.cpp | 7 + be/src/vec/exec/scan/vscanner.h | 7 + be/src/vec/exec/vexchange_node.cpp | 17 +- be/src/vec/exec/vexchange_node.h | 4 - be/src/vec/exec/vjdbc_connector.cpp | 18 +- be/src/vec/exprs/vcast_expr.cpp | 7 +- be/src/vec/functions/function.cpp | 132 +-- be/src/vec/functions/function.h | 28 +- be/src/vec/functions/function_bitmap.cpp | 7 +- be/src/vec/functions/function_cast.h | 129 +-- be/src/vec/functions/function_rpc.cpp | 2 +- be/src/vec/functions/function_rpc.h | 2 +- be/src/vec/functions/function_string.h | 226 ++-- be/src/vec/functions/functions_geo.cpp | 32 +- be/src/vec/functions/functions_geo.h | 1 - be/src/vec/functions/functions_logical.cpp | 5 +- be/src/vec/functions/least_greast.cpp | 3 +- be/src/vec/functions/nullif.cpp | 28 +- be/src/vec/runtime/vdata_stream_mgr.cpp | 15 +- be/src/vec/runtime/vdata_stream_mgr.h | 11 +- be/src/vec/runtime/vdata_stream_recvr.cpp | 21 +- be/src/vec/runtime/vdata_stream_recvr.h | 15 +- be/src/vec/sink/load_stream_stub.cpp | 5 +- be/src/vec/sink/load_stream_stub.h | 2 +- be/src/vec/sink/vdata_stream_sender.cpp | 39 +- be/src/vec/sink/vdata_stream_sender.h | 22 +- be/src/vec/sink/vresult_file_sink.cpp | 15 +- be/src/vec/sink/vresult_file_sink.h | 4 - be/src/vec/sink/vresult_sink.cpp | 8 +- be/src/vec/sink/vresult_sink.h | 3 - be/src/vec/sink/writer/vtablet_writer_v2.cpp | 5 +- be/src/vec/sink/writer/vwal_writer.cpp | 4 + be/test/vec/runtime/vdata_stream_test.cpp | 7 +- .../Show-Statements/SHOW-PROCESSLIST.md | 2 +- .../docs/ecosystem/flink-doris-connector.md | 1 + .../Show-Statements/SHOW-PROCESSLIST.md | 2 +- .../org/apache/doris/catalog/StructField.java | 2 +- .../java/org/apache/doris/common/Config.java | 25 + .../apache/doris/analysis/StructLiteral.java | 21 +- .../java/org/apache/doris/catalog/Env.java | 10 + .../translator/PhysicalPlanTranslator.java | 2 +- .../doris/nereids/trees/expressions/Cast.java | 2 +- .../functions/scalar/CreateStruct.java | 8 +- .../expressions/literal/ArrayLiteral.java | 6 +- .../trees/expressions/literal/MapLiteral.java | 32 +- .../expressions/literal/StructLiteral.java | 56 +- .../doris/nereids/trees/plans/Plan.java | 2 +- .../apache/doris/planner/OlapScanNode.java | 23 +- .../org/apache/doris/plugin/AuditEvent.java | 2 + .../org/apache/doris/qe/AuditLogHelper.java | 2 +- .../org/apache/doris/qe/ConnectProcessor.java | 3 +- .../org/apache/doris/qe/QeProcessorImpl.java | 9 + .../org/apache/doris/qe/SessionVariable.java | 2 +- .../WorkloadRuntimeStatusMgr.java | 223 ++++ .../apache/doris/analysis/MapLiteralTest.java | 4 +- .../apache/doris/analysis/SelectStmtTest.java | 5 + gensrc/proto/internal_service.proto | 1 + gensrc/thrift/FrontendService.thrift | 7 + ...low_wal_disk_space_fault_injection.csv.gz} | Bin .../eliminate_outer_join.out | 185 ++- .../eager_aggregate/basic.out | 66 +- .../eager_aggregate/basic_one_side.out | 60 +- .../push_down_count_through_join.out | 664 +++++------ .../push_down_count_through_join_one_side.out | 610 +++++----- .../push_down_max_through_join.out | 337 +++--- .../push_down_min_through_join.out | 337 +++--- 
.../push_down_sum_through_join.out | 361 +++--- .../push_down_sum_through_join_one_side.out | 337 +++--- .../eliminate_join_condition.out | 43 +- .../eliminate_not_null/eliminate_not_null.out | 59 +- .../eliminate_outer_join.out | 94 +- .../push_down_filter_other_condition.out | 410 +++---- .../push_filter_inside_join.out | 139 +-- .../filter_push_down/push_filter_through.out | 599 ++++++---- .../limit_push_down/limit_push_down.out | 1004 +++++++---------- .../limit_push_down/order_push_down.out | 830 ++++++-------- .../nereids_rules_p0/pkfk/eliminate_inner.out | 227 ++-- .../predicate_infer/infer_predicate.out | 533 ++++----- .../push_down_limit_distinct.out | 24 +- ...push_down_top_n_distinct_through_union.out | 182 ++- .../push_down_top_n_through_union.out | 245 ++-- .../transposeJoin/transposeSemiJoinAgg.out | 132 +-- .../shape/query17.out | 4 +- .../shape/query24.out | 4 +- .../shape/query25.out | 4 +- .../shape/query29.out | 4 +- .../shape/query64.out | 4 +- .../shape/query65.out | 4 +- .../noStatsRfPrune/query17.out | 4 +- .../noStatsRfPrune/query25.out | 4 +- .../noStatsRfPrune/query29.out | 4 +- .../noStatsRfPrune/query58.out | 8 +- .../noStatsRfPrune/query64.out | 10 +- .../noStatsRfPrune/query65.out | 8 +- .../noStatsRfPrune/query83.out | 8 +- .../noStatsRfPrune/query95.out | 2 +- .../no_stats_shape/query17.out | 8 +- .../no_stats_shape/query24.out | 4 +- .../no_stats_shape/query25.out | 8 +- .../no_stats_shape/query29.out | 8 +- .../no_stats_shape/query58.out | 4 +- .../no_stats_shape/query64.out | 8 +- .../no_stats_shape/query65.out | 4 +- .../no_stats_shape/query72.out | 4 +- .../no_stats_shape/query83.out | 4 +- .../no_stats_shape/query95.out | 2 +- .../rf_prune/query64.out | 20 +- .../shape/query17.out | 4 +- .../shape/query24.out | 4 +- .../shape/query25.out | 4 +- .../shape/query29.out | 4 +- .../shape/query64.out | 38 +- .../shape/query65.out | 4 +- .../nostats_rf_prune/q20-rewrite.out | 4 +- .../nostats_rf_prune/q20.out | 4 +- .../nostats_rf_prune/q5.out | 4 +- .../rf_prune/q20-rewrite.out | 4 +- .../rf_prune/q20.out | 4 +- .../shape/q20-rewrite.out | 4 +- .../shape/q20.out | 4 +- .../nereids_tpch_shape_sf1000_p0/shape/q5.out | 4 +- .../nereids_tpch_shape_sf1000_p0/shape/q9.out | 6 +- .../shape_no_stats/q20-rewrite.out | 4 +- .../shape_no_stats/q20.out | 4 +- .../shape_no_stats/q5.out | 4 +- .../shape_no_stats/q9.out | 8 +- .../cast_function/test_cast_struct.out | 2 +- .../test_struct_functions_by_literal.out | 12 +- .../test_load_stream_fault_injection.groovy | 20 +- ..._low_wal_disk_space_fault_injection.groovy | 66 ++ ...l_mem_back_pressure_fault_injection.groovy | 31 + .../insert_p0/test_struct_insert.groovy | 32 +- .../nereids_p0/create_table/test_ctas.groovy | 4 +- .../eliminate_outer_join.groovy | 6 +- .../expression/case_when_to_if.groovy | 10 +- .../nereids_p0/expression/topn_to_max.groovy | 4 +- .../bind_relation/bind_view.groovy | 1 + .../eager_aggregate/basic.groovy | 1 + .../eager_aggregate/basic_one_side.groovy | 1 + .../push_down_count_through_join.groovy | 1 + ...sh_down_count_through_join_one_side.groovy | 1 + .../push_down_max_through_join.groovy | 1 + .../push_down_min_through_join.groovy | 1 + .../push_down_sum_through_join.groovy | 1 + ...push_down_sum_through_join_one_side.groovy | 1 + .../eliminate_join_condition.groovy | 1 + .../eliminate_not_null.groovy | 1 + .../eliminate_outer_join.groovy | 1 + .../filter_push_through_aggregate.groovy | 1 + .../push_down_alias_through_join.groovy | 1 + .../push_down_expression_in_hash_join.groovy | 1 + 
.../push_down_filter_other_condition.groovy | 1 + .../push_filter_inside_join.groovy | 1 + .../push_filter_through.groovy | 1 + .../infer_set_operator_distinct.groovy | 1 + .../limit_push_down/limit_push_down.groovy | 1 + .../limit_push_down/order_push_down.groovy | 1 + .../suites/nereids_rules_p0/load.groovy | 1 + .../aggregate_with_roll_up.groovy | 2 + .../aggregate_without_roll_up.groovy | 2 + .../mv/join/inner/inner_join.groovy | 1 + .../mv/join/left_outer/outer_join.groovy | 2 + .../mv/partition_mv_rewrite.groovy | 1 + .../test_date_function_prune.groovy | 2 + .../test_multi_range_partition.groovy | 2 + .../test_partition_unique_model.groovy | 1 + .../pkfk/eliminate_inner.groovy | 2 + .../predicate_infer/infer_predicate.groovy | 1 + .../push_down_limit_distinct.groovy | 1 + ...h_down_top_n_distinct_through_union.groovy | 1 + .../push_down_top_n_through_union.groovy | 1 + .../subquery_basic_pullup_and_subquery.groovy | 1 + .../basic/subquery_basic_pullup_basic.groovy | 1 + .../basic/subquery_basic_pullup_or.groovy | 1 + .../subquery_basic_pullup_or_subquery.groovy | 1 + .../basic/subquery_basic_pullup_uk.groovy | 1 + .../basic/subquery_basic_ullup_and.groovy | 1 + .../misc/subquery_misc_pullup_dml.groovy | 1 + .../misc/subquery_misc_pullup_misc.groovy | 1 + .../subquery_multitable_pullup_and.groovy | 1 + ...uery_multitable_pullup_and_subquery.groovy | 1 + .../subquery_multitable_pullup_basic.groovy | 1 + .../subquery_multitable_pullup_or.groovy | 1 + ...query_multitable_pullup_or_subquery.groovy | 1 + .../subquery_topop_pullup_groupby.groovy | 1 + .../topop/subquery_topop_pullup_having.groovy | 1 + .../subquery_topop_pullup_orderby.groovy | 1 + .../subquery_topop_pullup_selectlist.groovy | 1 + .../subquery_topop_pullup_winfunc.groovy | 1 + .../transposeJoin/transposeSemiJoinAgg.groovy | 2 + .../ddl/shape.tmpl | 1 + .../shape/query1.groovy | 1 + .../shape/query10.groovy | 1 + .../shape/query11.groovy | 1 + .../shape/query12.groovy | 1 + .../shape/query13.groovy | 1 + .../shape/query14.groovy | 1 + .../shape/query15.groovy | 1 + .../shape/query16.groovy | 1 + .../shape/query17.groovy | 1 + .../shape/query18.groovy | 1 + .../shape/query19.groovy | 1 + .../shape/query2.groovy | 1 + .../shape/query20.groovy | 1 + .../shape/query21.groovy | 1 + .../shape/query22.groovy | 1 + .../shape/query23.groovy | 1 + .../shape/query24.groovy | 1 + .../shape/query25.groovy | 1 + .../shape/query26.groovy | 1 + .../shape/query27.groovy | 1 + .../shape/query28.groovy | 1 + .../shape/query29.groovy | 1 + .../shape/query3.groovy | 1 + .../shape/query30.groovy | 1 + .../shape/query31.groovy | 1 + .../shape/query32.groovy | 1 + .../shape/query33.groovy | 1 + .../shape/query34.groovy | 1 + .../shape/query35.groovy | 1 + .../shape/query36.groovy | 1 + .../shape/query37.groovy | 1 + .../shape/query38.groovy | 1 + .../shape/query39.groovy | 1 + .../shape/query4.groovy | 1 + .../shape/query40.groovy | 1 + .../shape/query41.groovy | 1 + .../shape/query42.groovy | 1 + .../shape/query43.groovy | 1 + .../shape/query44.groovy | 1 + .../shape/query45.groovy | 1 + .../shape/query46.groovy | 1 + .../shape/query47.groovy | 1 + .../shape/query48.groovy | 1 + .../shape/query49.groovy | 1 + .../shape/query5.groovy | 1 + .../shape/query50.groovy | 1 + .../shape/query51.groovy | 1 + .../shape/query52.groovy | 1 + .../shape/query53.groovy | 1 + .../shape/query54.groovy | 1 + .../shape/query55.groovy | 1 + .../shape/query56.groovy | 1 + .../shape/query57.groovy | 1 + .../shape/query58.groovy | 1 + .../shape/query59.groovy | 1 + 
.../shape/query6.groovy | 1 + .../shape/query60.groovy | 1 + .../shape/query61.groovy | 1 + .../shape/query62.groovy | 1 + .../shape/query63.groovy | 1 + .../shape/query64.groovy | 1 + .../shape/query65.groovy | 1 + .../shape/query66.groovy | 1 + .../shape/query67.groovy | 1 + .../shape/query68.groovy | 1 + .../shape/query69.groovy | 1 + .../shape/query7.groovy | 1 + .../shape/query70.groovy | 1 + .../shape/query71.groovy | 1 + .../shape/query72.groovy | 1 + .../shape/query73.groovy | 1 + .../shape/query74.groovy | 1 + .../shape/query75.groovy | 1 + .../shape/query76.groovy | 1 + .../shape/query77.groovy | 1 + .../shape/query78.groovy | 1 + .../shape/query79.groovy | 1 + .../shape/query8.groovy | 1 + .../shape/query80.groovy | 1 + .../shape/query81.groovy | 1 + .../shape/query82.groovy | 1 + .../shape/query83.groovy | 1 + .../shape/query84.groovy | 1 + .../shape/query85.groovy | 1 + .../shape/query86.groovy | 1 + .../shape/query87.groovy | 1 + .../shape/query88.groovy | 1 + .../shape/query89.groovy | 1 + .../shape/query9.groovy | 1 + .../shape/query90.groovy | 1 + .../shape/query91.groovy | 1 + .../shape/query92.groovy | 1 + .../shape/query93.groovy | 1 + .../shape/query94.groovy | 1 + .../shape/query95.groovy | 1 + .../shape/query96.groovy | 1 + .../shape/query97.groovy | 1 + .../shape/query98.groovy | 1 + .../shape/query99.groovy | 1 + .../ddl/rf_prune.tmpl | 1 + .../ddl/shape.tmpl | 1 + .../noStatsRfPrune/query1.groovy | 3 +- .../noStatsRfPrune/query10.groovy | 3 +- .../noStatsRfPrune/query11.groovy | 3 +- .../noStatsRfPrune/query12.groovy | 3 +- .../noStatsRfPrune/query13.groovy | 3 +- .../noStatsRfPrune/query14.groovy | 3 +- .../noStatsRfPrune/query15.groovy | 3 +- .../noStatsRfPrune/query16.groovy | 3 +- .../noStatsRfPrune/query17.groovy | 3 +- .../noStatsRfPrune/query18.groovy | 3 +- .../noStatsRfPrune/query19.groovy | 3 +- .../noStatsRfPrune/query2.groovy | 3 +- .../noStatsRfPrune/query20.groovy | 3 +- .../noStatsRfPrune/query21.groovy | 3 +- .../noStatsRfPrune/query22.groovy | 3 +- .../noStatsRfPrune/query23.groovy | 3 +- .../noStatsRfPrune/query24.groovy | 3 +- .../noStatsRfPrune/query25.groovy | 3 +- .../noStatsRfPrune/query26.groovy | 3 +- .../noStatsRfPrune/query27.groovy | 3 +- .../noStatsRfPrune/query28.groovy | 3 +- .../noStatsRfPrune/query29.groovy | 3 +- .../noStatsRfPrune/query3.groovy | 3 +- .../noStatsRfPrune/query30.groovy | 3 +- .../noStatsRfPrune/query31.groovy | 3 +- .../noStatsRfPrune/query32.groovy | 3 +- .../noStatsRfPrune/query33.groovy | 3 +- .../noStatsRfPrune/query34.groovy | 3 +- .../noStatsRfPrune/query35.groovy | 3 +- .../noStatsRfPrune/query36.groovy | 3 +- .../noStatsRfPrune/query37.groovy | 3 +- .../noStatsRfPrune/query38.groovy | 3 +- .../noStatsRfPrune/query39.groovy | 3 +- .../noStatsRfPrune/query4.groovy | 3 +- .../noStatsRfPrune/query40.groovy | 3 +- .../noStatsRfPrune/query41.groovy | 3 +- .../noStatsRfPrune/query42.groovy | 3 +- .../noStatsRfPrune/query43.groovy | 3 +- .../noStatsRfPrune/query44.groovy | 3 +- .../noStatsRfPrune/query45.groovy | 3 +- .../noStatsRfPrune/query46.groovy | 3 +- .../noStatsRfPrune/query47.groovy | 3 +- .../noStatsRfPrune/query48.groovy | 3 +- .../noStatsRfPrune/query49.groovy | 3 +- .../noStatsRfPrune/query5.groovy | 3 +- .../noStatsRfPrune/query50.groovy | 3 +- .../noStatsRfPrune/query51.groovy | 3 +- .../noStatsRfPrune/query52.groovy | 3 +- .../noStatsRfPrune/query53.groovy | 3 +- .../noStatsRfPrune/query54.groovy | 3 +- .../noStatsRfPrune/query55.groovy | 3 +- .../noStatsRfPrune/query56.groovy | 3 +- 
.../noStatsRfPrune/query57.groovy | 3 +- .../noStatsRfPrune/query58.groovy | 3 +- .../noStatsRfPrune/query59.groovy | 3 +- .../noStatsRfPrune/query6.groovy | 3 +- .../noStatsRfPrune/query60.groovy | 3 +- .../noStatsRfPrune/query61.groovy | 3 +- .../noStatsRfPrune/query62.groovy | 3 +- .../noStatsRfPrune/query63.groovy | 3 +- .../noStatsRfPrune/query64.groovy | 3 +- .../noStatsRfPrune/query65.groovy | 3 +- .../noStatsRfPrune/query66.groovy | 3 +- .../noStatsRfPrune/query67.groovy | 3 +- .../noStatsRfPrune/query68.groovy | 3 +- .../noStatsRfPrune/query69.groovy | 3 +- .../noStatsRfPrune/query7.groovy | 3 +- .../noStatsRfPrune/query70.groovy | 3 +- .../noStatsRfPrune/query71.groovy | 3 +- .../noStatsRfPrune/query72.groovy | 3 +- .../noStatsRfPrune/query73.groovy | 3 +- .../noStatsRfPrune/query74.groovy | 3 +- .../noStatsRfPrune/query75.groovy | 3 +- .../noStatsRfPrune/query76.groovy | 3 +- .../noStatsRfPrune/query77.groovy | 3 +- .../noStatsRfPrune/query78.groovy | 3 +- .../noStatsRfPrune/query79.groovy | 3 +- .../noStatsRfPrune/query8.groovy | 3 +- .../noStatsRfPrune/query80.groovy | 3 +- .../noStatsRfPrune/query81.groovy | 3 +- .../noStatsRfPrune/query82.groovy | 3 +- .../noStatsRfPrune/query83.groovy | 3 +- .../noStatsRfPrune/query84.groovy | 3 +- .../noStatsRfPrune/query85.groovy | 3 +- .../noStatsRfPrune/query86.groovy | 3 +- .../noStatsRfPrune/query87.groovy | 3 +- .../noStatsRfPrune/query88.groovy | 3 +- .../noStatsRfPrune/query89.groovy | 3 +- .../noStatsRfPrune/query9.groovy | 3 +- .../noStatsRfPrune/query90.groovy | 3 +- .../noStatsRfPrune/query91.groovy | 3 +- .../noStatsRfPrune/query92.groovy | 3 +- .../noStatsRfPrune/query93.groovy | 3 +- .../noStatsRfPrune/query94.groovy | 3 +- .../noStatsRfPrune/query95.groovy | 3 +- .../noStatsRfPrune/query96.groovy | 3 +- .../noStatsRfPrune/query97.groovy | 3 +- .../noStatsRfPrune/query98.groovy | 3 +- .../noStatsRfPrune/query99.groovy | 3 +- .../no_stats_shape/query1.groovy | 3 +- .../no_stats_shape/query10.groovy | 3 +- .../no_stats_shape/query11.groovy | 3 +- .../no_stats_shape/query12.groovy | 3 +- .../no_stats_shape/query13.groovy | 3 +- .../no_stats_shape/query14.groovy | 3 +- .../no_stats_shape/query15.groovy | 3 +- .../no_stats_shape/query16.groovy | 3 +- .../no_stats_shape/query17.groovy | 3 +- .../no_stats_shape/query18.groovy | 3 +- .../no_stats_shape/query19.groovy | 3 +- .../no_stats_shape/query2.groovy | 3 +- .../no_stats_shape/query20.groovy | 3 +- .../no_stats_shape/query21.groovy | 3 +- .../no_stats_shape/query22.groovy | 3 +- .../no_stats_shape/query23.groovy | 3 +- .../no_stats_shape/query24.groovy | 3 +- .../no_stats_shape/query25.groovy | 3 +- .../no_stats_shape/query26.groovy | 3 +- .../no_stats_shape/query27.groovy | 3 +- .../no_stats_shape/query28.groovy | 3 +- .../no_stats_shape/query29.groovy | 3 +- .../no_stats_shape/query3.groovy | 3 +- .../no_stats_shape/query30.groovy | 3 +- .../no_stats_shape/query31.groovy | 3 +- .../no_stats_shape/query32.groovy | 3 +- .../no_stats_shape/query33.groovy | 3 +- .../no_stats_shape/query34.groovy | 3 +- .../no_stats_shape/query35.groovy | 3 +- .../no_stats_shape/query36.groovy | 3 +- .../no_stats_shape/query37.groovy | 3 +- .../no_stats_shape/query38.groovy | 3 +- .../no_stats_shape/query39.groovy | 3 +- .../no_stats_shape/query4.groovy | 3 +- .../no_stats_shape/query40.groovy | 3 +- .../no_stats_shape/query41.groovy | 3 +- .../no_stats_shape/query42.groovy | 3 +- .../no_stats_shape/query43.groovy | 3 +- .../no_stats_shape/query44.groovy | 3 +- .../no_stats_shape/query45.groovy | 
3 +- .../no_stats_shape/query46.groovy | 3 +- .../no_stats_shape/query47.groovy | 3 +- .../no_stats_shape/query48.groovy | 3 +- .../no_stats_shape/query49.groovy | 3 +- .../no_stats_shape/query5.groovy | 3 +- .../no_stats_shape/query50.groovy | 3 +- .../no_stats_shape/query51.groovy | 3 +- .../no_stats_shape/query52.groovy | 3 +- .../no_stats_shape/query53.groovy | 3 +- .../no_stats_shape/query54.groovy | 3 +- .../no_stats_shape/query55.groovy | 3 +- .../no_stats_shape/query56.groovy | 3 +- .../no_stats_shape/query57.groovy | 3 +- .../no_stats_shape/query58.groovy | 3 +- .../no_stats_shape/query59.groovy | 3 +- .../no_stats_shape/query6.groovy | 3 +- .../no_stats_shape/query60.groovy | 3 +- .../no_stats_shape/query61.groovy | 3 +- .../no_stats_shape/query62.groovy | 3 +- .../no_stats_shape/query63.groovy | 3 +- .../no_stats_shape/query64.groovy | 3 +- .../no_stats_shape/query65.groovy | 3 +- .../no_stats_shape/query66.groovy | 3 +- .../no_stats_shape/query67.groovy | 3 +- .../no_stats_shape/query68.groovy | 3 +- .../no_stats_shape/query69.groovy | 3 +- .../no_stats_shape/query7.groovy | 3 +- .../no_stats_shape/query70.groovy | 3 +- .../no_stats_shape/query71.groovy | 3 +- .../no_stats_shape/query72.groovy | 3 +- .../no_stats_shape/query73.groovy | 3 +- .../no_stats_shape/query74.groovy | 3 +- .../no_stats_shape/query75.groovy | 3 +- .../no_stats_shape/query76.groovy | 3 +- .../no_stats_shape/query77.groovy | 3 +- .../no_stats_shape/query78.groovy | 3 +- .../no_stats_shape/query79.groovy | 3 +- .../no_stats_shape/query8.groovy | 3 +- .../no_stats_shape/query80.groovy | 3 +- .../no_stats_shape/query81.groovy | 3 +- .../no_stats_shape/query82.groovy | 3 +- .../no_stats_shape/query83.groovy | 3 +- .../no_stats_shape/query84.groovy | 3 +- .../no_stats_shape/query85.groovy | 3 +- .../no_stats_shape/query86.groovy | 3 +- .../no_stats_shape/query87.groovy | 3 +- .../no_stats_shape/query88.groovy | 3 +- .../no_stats_shape/query89.groovy | 3 +- .../no_stats_shape/query9.groovy | 3 +- .../no_stats_shape/query90.groovy | 3 +- .../no_stats_shape/query91.groovy | 3 +- .../no_stats_shape/query92.groovy | 3 +- .../no_stats_shape/query93.groovy | 3 +- .../no_stats_shape/query94.groovy | 3 +- .../no_stats_shape/query95.groovy | 3 +- .../no_stats_shape/query96.groovy | 3 +- .../no_stats_shape/query97.groovy | 3 +- .../no_stats_shape/query98.groovy | 3 +- .../no_stats_shape/query99.groovy | 3 +- .../rf_prune/query1.groovy | 1 + .../rf_prune/query10.groovy | 1 + .../rf_prune/query11.groovy | 1 + .../rf_prune/query12.groovy | 1 + .../rf_prune/query13.groovy | 1 + .../rf_prune/query14.groovy | 1 + .../rf_prune/query15.groovy | 1 + .../rf_prune/query16.groovy | 1 + .../rf_prune/query17.groovy | 1 + .../rf_prune/query18.groovy | 1 + .../rf_prune/query19.groovy | 1 + .../rf_prune/query2.groovy | 1 + .../rf_prune/query20.groovy | 1 + .../rf_prune/query21.groovy | 1 + .../rf_prune/query22.groovy | 1 + .../rf_prune/query23.groovy | 1 + .../rf_prune/query24.groovy | 1 + .../rf_prune/query25.groovy | 1 + .../rf_prune/query26.groovy | 1 + .../rf_prune/query27.groovy | 1 + .../rf_prune/query28.groovy | 1 + .../rf_prune/query29.groovy | 1 + .../rf_prune/query3.groovy | 1 + .../rf_prune/query30.groovy | 1 + .../rf_prune/query31.groovy | 1 + .../rf_prune/query32.groovy | 1 + .../rf_prune/query33.groovy | 1 + .../rf_prune/query34.groovy | 1 + .../rf_prune/query35.groovy | 1 + .../rf_prune/query36.groovy | 1 + .../rf_prune/query37.groovy | 1 + .../rf_prune/query38.groovy | 1 + .../rf_prune/query39.groovy | 1 + 
.../rf_prune/query4.groovy | 1 + .../rf_prune/query40.groovy | 1 + .../rf_prune/query41.groovy | 1 + .../rf_prune/query42.groovy | 1 + .../rf_prune/query43.groovy | 1 + .../rf_prune/query44.groovy | 1 + .../rf_prune/query45.groovy | 1 + .../rf_prune/query46.groovy | 1 + .../rf_prune/query47.groovy | 1 + .../rf_prune/query48.groovy | 1 + .../rf_prune/query49.groovy | 1 + .../rf_prune/query5.groovy | 1 + .../rf_prune/query50.groovy | 1 + .../rf_prune/query51.groovy | 1 + .../rf_prune/query52.groovy | 1 + .../rf_prune/query53.groovy | 1 + .../rf_prune/query54.groovy | 1 + .../rf_prune/query55.groovy | 1 + .../rf_prune/query56.groovy | 1 + .../rf_prune/query57.groovy | 1 + .../rf_prune/query58.groovy | 1 + .../rf_prune/query59.groovy | 1 + .../rf_prune/query6.groovy | 1 + .../rf_prune/query60.groovy | 1 + .../rf_prune/query61.groovy | 1 + .../rf_prune/query62.groovy | 1 + .../rf_prune/query63.groovy | 1 + .../rf_prune/query64.groovy | 1 + .../rf_prune/query65.groovy | 1 + .../rf_prune/query66.groovy | 1 + .../rf_prune/query67.groovy | 1 + .../rf_prune/query68.groovy | 1 + .../rf_prune/query69.groovy | 1 + .../rf_prune/query7.groovy | 1 + .../rf_prune/query70.groovy | 1 + .../rf_prune/query71.groovy | 1 + .../rf_prune/query72.groovy | 1 + .../rf_prune/query73.groovy | 1 + .../rf_prune/query74.groovy | 1 + .../rf_prune/query75.groovy | 1 + .../rf_prune/query76.groovy | 1 + .../rf_prune/query77.groovy | 1 + .../rf_prune/query78.groovy | 1 + .../rf_prune/query79.groovy | 1 + .../rf_prune/query8.groovy | 1 + .../rf_prune/query80.groovy | 1 + .../rf_prune/query81.groovy | 1 + .../rf_prune/query82.groovy | 1 + .../rf_prune/query83.groovy | 1 + .../rf_prune/query84.groovy | 1 + .../rf_prune/query85.groovy | 1 + .../rf_prune/query86.groovy | 1 + .../rf_prune/query87.groovy | 1 + .../rf_prune/query88.groovy | 1 + .../rf_prune/query89.groovy | 1 + .../rf_prune/query9.groovy | 1 + .../rf_prune/query90.groovy | 1 + .../rf_prune/query91.groovy | 1 + .../rf_prune/query92.groovy | 1 + .../rf_prune/query93.groovy | 1 + .../rf_prune/query94.groovy | 1 + .../rf_prune/query95.groovy | 1 + .../rf_prune/query96.groovy | 1 + .../rf_prune/query97.groovy | 1 + .../rf_prune/query98.groovy | 1 + .../rf_prune/query99.groovy | 1 + .../shape/query1.groovy | 1 + .../shape/query10.groovy | 1 + .../shape/query11.groovy | 1 + .../shape/query12.groovy | 1 + .../shape/query13.groovy | 1 + .../shape/query14.groovy | 1 + .../shape/query15.groovy | 1 + .../shape/query16.groovy | 1 + .../shape/query17.groovy | 1 + .../shape/query18.groovy | 1 + .../shape/query19.groovy | 1 + .../shape/query2.groovy | 1 + .../shape/query20.groovy | 1 + .../shape/query21.groovy | 1 + .../shape/query22.groovy | 1 + .../shape/query23.groovy | 1 + .../shape/query24.groovy | 1 + .../shape/query25.groovy | 1 + .../shape/query26.groovy | 1 + .../shape/query27.groovy | 1 + .../shape/query28.groovy | 1 + .../shape/query29.groovy | 1 + .../shape/query3.groovy | 1 + .../shape/query30.groovy | 1 + .../shape/query31.groovy | 1 + .../shape/query32.groovy | 1 + .../shape/query33.groovy | 1 + .../shape/query34.groovy | 1 + .../shape/query35.groovy | 1 + .../shape/query36.groovy | 1 + .../shape/query37.groovy | 1 + .../shape/query38.groovy | 1 + .../shape/query39.groovy | 1 + .../shape/query4.groovy | 1 + .../shape/query40.groovy | 1 + .../shape/query41.groovy | 1 + .../shape/query42.groovy | 1 + .../shape/query43.groovy | 1 + .../shape/query44.groovy | 1 + .../shape/query45.groovy | 1 + .../shape/query46.groovy | 1 + .../shape/query47.groovy | 1 + 
.../shape/query48.groovy | 1 + .../shape/query49.groovy | 1 + .../shape/query5.groovy | 1 + .../shape/query50.groovy | 1 + .../shape/query51.groovy | 1 + .../shape/query52.groovy | 1 + .../shape/query53.groovy | 1 + .../shape/query54.groovy | 1 + .../shape/query55.groovy | 1 + .../shape/query56.groovy | 1 + .../shape/query57.groovy | 1 + .../shape/query58.groovy | 1 + .../shape/query59.groovy | 1 + .../shape/query6.groovy | 1 + .../shape/query60.groovy | 1 + .../shape/query61.groovy | 1 + .../shape/query62.groovy | 1 + .../shape/query63.groovy | 1 + .../shape/query64.groovy | 1 + .../shape/query65.groovy | 1 + .../shape/query66.groovy | 1 + .../shape/query67.groovy | 1 + .../shape/query68.groovy | 1 + .../shape/query69.groovy | 1 + .../shape/query7.groovy | 1 + .../shape/query70.groovy | 1 + .../shape/query71.groovy | 1 + .../shape/query72.groovy | 1 + .../shape/query73.groovy | 1 + .../shape/query74.groovy | 1 + .../shape/query75.groovy | 1 + .../shape/query76.groovy | 1 + .../shape/query77.groovy | 1 + .../shape/query78.groovy | 1 + .../shape/query79.groovy | 1 + .../shape/query8.groovy | 1 + .../shape/query80.groovy | 1 + .../shape/query81.groovy | 1 + .../shape/query82.groovy | 1 + .../shape/query83.groovy | 1 + .../shape/query84.groovy | 1 + .../shape/query85.groovy | 1 + .../shape/query86.groovy | 1 + .../shape/query87.groovy | 1 + .../shape/query88.groovy | 1 + .../shape/query89.groovy | 1 + .../shape/query9.groovy | 1 + .../shape/query90.groovy | 1 + .../shape/query91.groovy | 1 + .../shape/query92.groovy | 1 + .../shape/query93.groovy | 1 + .../shape/query94.groovy | 1 + .../shape/query95.groovy | 1 + .../shape/query96.groovy | 1 + .../shape/query97.groovy | 1 + .../shape/query98.groovy | 1 + .../shape/query99.groovy | 1 + .../nostats_rf_prune/q1.groovy | 3 +- .../nostats_rf_prune/q10.groovy | 3 +- .../nostats_rf_prune/q11.groovy | 3 +- .../nostats_rf_prune/q12.groovy | 3 +- .../nostats_rf_prune/q13.groovy | 3 +- .../nostats_rf_prune/q14.groovy | 3 +- .../nostats_rf_prune/q15.groovy | 3 +- .../nostats_rf_prune/q16.groovy | 3 +- .../nostats_rf_prune/q17.groovy | 3 +- .../nostats_rf_prune/q18.groovy | 3 +- .../nostats_rf_prune/q19.groovy | 3 +- .../nostats_rf_prune/q2.groovy | 3 +- .../nostats_rf_prune/q20-rewrite.groovy | 3 +- .../nostats_rf_prune/q20.groovy | 3 +- .../nostats_rf_prune/q21.groovy | 3 +- .../nostats_rf_prune/q22.groovy | 3 +- .../nostats_rf_prune/q3.groovy | 3 +- .../nostats_rf_prune/q4.groovy | 3 +- .../nostats_rf_prune/q5.groovy | 3 +- .../nostats_rf_prune/q6.groovy | 3 +- .../nostats_rf_prune/q7.groovy | 3 +- .../nostats_rf_prune/q8.groovy | 3 +- .../nostats_rf_prune/q9.groovy | 3 +- .../rf_prune/q1.groovy | 3 +- .../rf_prune/q10.groovy | 3 +- .../rf_prune/q11.groovy | 3 +- .../rf_prune/q12.groovy | 3 +- .../rf_prune/q13.groovy | 3 +- .../rf_prune/q14.groovy | 3 +- .../rf_prune/q15.groovy | 3 +- .../rf_prune/q16.groovy | 3 +- .../rf_prune/q17.groovy | 3 +- .../rf_prune/q18.groovy | 3 +- .../rf_prune/q19.groovy | 3 +- .../rf_prune/q2.groovy | 3 +- .../rf_prune/q20-rewrite.groovy | 3 +- .../rf_prune/q20.groovy | 3 +- .../rf_prune/q21.groovy | 3 +- .../rf_prune/q22.groovy | 3 +- .../rf_prune/q3.groovy | 3 +- .../rf_prune/q4.groovy | 3 +- .../rf_prune/q5.groovy | 3 +- .../rf_prune/q6.groovy | 3 +- .../rf_prune/q7.groovy | 3 +- .../rf_prune/q8.groovy | 3 +- .../rf_prune/q9.groovy | 3 +- .../shape/q1.groovy | 3 +- .../shape/q10.groovy | 3 +- .../shape/q11.groovy | 3 +- .../shape/q12.groovy | 3 +- .../shape/q13.groovy | 3 +- .../shape/q14.groovy | 3 +- 
.../shape/q15.groovy | 3 +- .../shape/q16.groovy | 3 +- .../shape/q17.groovy | 3 +- .../shape/q18.groovy | 3 +- .../shape/q19.groovy | 3 +- .../shape/q2.groovy | 3 +- .../shape/q20-rewrite.groovy | 3 +- .../shape/q20.groovy | 3 +- .../shape/q21.groovy | 3 +- .../shape/q22.groovy | 3 +- .../shape/q3.groovy | 3 +- .../shape/q4.groovy | 3 +- .../shape/q5.groovy | 3 +- .../shape/q6.groovy | 3 +- .../shape/q7.groovy | 3 +- .../shape/q8.groovy | 3 +- .../shape/q9.groovy | 3 +- .../shape_no_stats/q1.groovy | 3 +- .../shape_no_stats/q10.groovy | 3 +- .../shape_no_stats/q11.groovy | 3 +- .../shape_no_stats/q12.groovy | 3 +- .../shape_no_stats/q13.groovy | 3 +- .../shape_no_stats/q14.groovy | 3 +- .../shape_no_stats/q15.groovy | 3 +- .../shape_no_stats/q16.groovy | 3 +- .../shape_no_stats/q17.groovy | 3 +- .../shape_no_stats/q18.groovy | 3 +- .../shape_no_stats/q19.groovy | 3 +- .../shape_no_stats/q2.groovy | 3 +- .../shape_no_stats/q20-rewrite.groovy | 3 +- .../shape_no_stats/q20.groovy | 3 +- .../shape_no_stats/q21.groovy | 3 +- .../shape_no_stats/q22.groovy | 3 +- .../shape_no_stats/q3.groovy | 3 +- .../shape_no_stats/q4.groovy | 3 +- .../shape_no_stats/q5.groovy | 3 +- .../shape_no_stats/q6.groovy | 3 +- .../shape_no_stats/q7.groovy | 3 +- .../shape_no_stats/q8.groovy | 3 +- .../shape_no_stats/q9.groovy | 3 +- 867 files changed, 5834 insertions(+), 5834 deletions(-) create mode 100644 be/src/runtime/runtime_query_statistics_mgr.cpp create mode 100644 be/src/runtime/runtime_query_statistics_mgr.h create mode 100644 fe/fe-core/src/main/java/org/apache/doris/resource/workloadschedpolicy/WorkloadRuntimeStatusMgr.java rename regression-test/data/fault_injection_p0/{test_wal_mem_back_pressure_fault_injection.csv.gz => test_low_wal_disk_space_fault_injection.csv.gz} (100%) create mode 100644 regression-test/suites/fault_injection_p0/test_low_wal_disk_space_fault_injection.groovy diff --git a/be/src/cloud/config.cpp b/be/src/cloud/config.cpp index 4d9da1e9cfc83e6..12a217dfcd05103 100644 --- a/be/src/cloud/config.cpp +++ b/be/src/cloud/config.cpp @@ -17,10 +17,8 @@ #include "cloud/config.h" -namespace doris { -namespace config { +namespace doris::config { -// TODO +DEFINE_String(cloud_unique_id, ""); -} // namespace config -} // namespace doris +} // namespace doris::config diff --git a/be/src/cloud/config.h b/be/src/cloud/config.h index 21a3b6052f59be6..0044ab11458b439 100644 --- a/be/src/cloud/config.h +++ b/be/src/cloud/config.h @@ -19,10 +19,12 @@ #include "common/config.h" -namespace doris { -namespace config { +namespace doris::config { -// TODO +DECLARE_String(cloud_unique_id); -} // namespace config -} // namespace doris +static inline bool is_cloud_mode() { + return !cloud_unique_id.empty(); +} + +} // namespace doris::config diff --git a/be/src/common/config.cpp b/be/src/common/config.cpp index 9f468e3edc24540..3cf5a277f3db2e0 100644 --- a/be/src/common/config.cpp +++ b/be/src/common/config.cpp @@ -776,8 +776,6 @@ DEFINE_Int64(open_load_stream_timeout_ms, "60000"); // 60s // timeout for load stream close wait in ms DEFINE_Int64(close_load_stream_timeout_ms, "600000"); // 10 min -// idle timeout for load stream in ms -DEFINE_mInt64(load_stream_idle_timeout_ms, "600000"); // brpc streaming max_buf_size in bytes DEFINE_Int64(load_stream_max_buf_size, "20971520"); // 20MB // brpc streaming messages_in_batch @@ -1161,6 +1159,8 @@ DEFINE_mInt64(enable_debug_log_timeout_secs, "0"); // Tolerance for the number of partition id 0 in rowset, default 0 DEFINE_Int32(ignore_invalid_partition_id_rowset_num, 
"0"); +DEFINE_mInt32(report_query_statistics_interval_ms, "3000"); + // clang-format off #ifdef BE_TEST // test s3 diff --git a/be/src/common/config.h b/be/src/common/config.h index 97f4dd5535dc1ce..298b2fd4bfd35c3 100644 --- a/be/src/common/config.h +++ b/be/src/common/config.h @@ -829,8 +829,6 @@ DECLARE_Int64(open_load_stream_timeout_ms); // timeout for load stream close wait in ms DECLARE_Int64(close_load_stream_timeout_ms); -// idle timeout for load stream in ms -DECLARE_Int64(load_stream_idle_timeout_ms); // brpc streaming max_buf_size in bytes DECLARE_Int64(load_stream_max_buf_size); // brpc streaming messages_in_batch @@ -1237,6 +1235,8 @@ DECLARE_mBool(enable_column_type_check); // Tolerance for the number of partition id 0 in rowset, default 0 DECLARE_Int32(ignore_invalid_partition_id_rowset_num); +DECLARE_mInt32(report_query_statistics_interval_ms); + #ifdef BE_TEST // test s3 DECLARE_String(test_s3_resource); diff --git a/be/src/common/daemon.cpp b/be/src/common/daemon.cpp index e3bf1a738b87a8c..5274808c9b8085f 100644 --- a/be/src/common/daemon.cpp +++ b/be/src/common/daemon.cpp @@ -45,9 +45,12 @@ #include "olap/storage_engine.h" #include "olap/tablet_manager.h" #include "runtime/block_spill_manager.h" +#include "runtime/client_cache.h" #include "runtime/exec_env.h" +#include "runtime/fragment_mgr.h" #include "runtime/memory/mem_tracker.h" #include "runtime/memory/mem_tracker_limiter.h" +#include "runtime/runtime_query_statistics_mgr.h" #include "runtime/task_group/task_group_manager.h" #include "util/cpu_info.h" #include "util/debug_util.h" @@ -352,6 +355,13 @@ void Daemon::block_spill_gc_thread() { } } +void Daemon::report_runtime_query_statistics_thread() { + while (!_stop_background_threads_latch.wait_for( + std::chrono::milliseconds(config::report_query_statistics_interval_ms))) { + ExecEnv::GetInstance()->runtime_query_statistics_mgr()->report_runtime_query_statistics(); + } +} + void Daemon::je_purge_dirty_pages_thread() const { do { std::unique_lock l(doris::MemInfo::je_purge_dirty_pages_lock); @@ -399,6 +409,9 @@ void Daemon::start() { st = Thread::create( "Daemon", "je_purge_dirty_pages_thread", [this]() { this->je_purge_dirty_pages_thread(); }, &_threads.emplace_back()); + st = Thread::create( + "Daemon", "query_runtime_statistics_thread", + [this]() { this->report_runtime_query_statistics_thread(); }, &_threads.emplace_back()); CHECK(st.ok()) << st; } diff --git a/be/src/common/daemon.h b/be/src/common/daemon.h index 18f78cbe5832503..e88dd737643c642 100644 --- a/be/src/common/daemon.h +++ b/be/src/common/daemon.h @@ -44,6 +44,7 @@ class Daemon { void calculate_metrics_thread(); void block_spill_gc_thread(); void je_purge_dirty_pages_thread() const; + void report_runtime_query_statistics_thread(); CountDownLatch _stop_background_threads_latch; std::vector> _threads; diff --git a/be/src/exec/data_sink.cpp b/be/src/exec/data_sink.cpp index 2742ccd163e5817..c58bbdb25238b65 100644 --- a/be/src/exec/data_sink.cpp +++ b/be/src/exec/data_sink.cpp @@ -54,14 +54,10 @@ Status DataSink::create_data_sink(ObjectPool* pool, const TDataSink& thrift_sink if (!thrift_sink.__isset.stream_sink) { return Status::InternalError("Missing data stream sink."); } - bool send_query_statistics_with_every_batch = - params.__isset.send_query_statistics_with_every_batch - ? 
params.send_query_statistics_with_every_batch - : false; // TODO: figure out good buffer size based on size of output row sink->reset(new vectorized::VDataStreamSender(state, pool, params.sender_id, row_desc, - thrift_sink.stream_sink, params.destinations, - send_query_statistics_with_every_batch)); + thrift_sink.stream_sink, + params.destinations)); // RETURN_IF_ERROR(sender->prepare(state->obj_pool(), thrift_sink.stream_sink)); break; } @@ -82,16 +78,11 @@ Status DataSink::create_data_sink(ObjectPool* pool, const TDataSink& thrift_sink } // TODO: figure out good buffer size based on size of output row - bool send_query_statistics_with_every_batch = - params.__isset.send_query_statistics_with_every_batch - ? params.send_query_statistics_with_every_batch - : false; // Result file sink is not the top sink if (params.__isset.destinations && params.destinations.size() > 0) { sink->reset(new doris::vectorized::VResultFileSink( state, pool, params.sender_id, row_desc, thrift_sink.result_file_sink, - params.destinations, send_query_statistics_with_every_batch, output_exprs, - desc_tbl)); + params.destinations, output_exprs, desc_tbl)); } else { sink->reset(new doris::vectorized::VResultFileSink(row_desc, output_exprs)); } @@ -201,14 +192,10 @@ Status DataSink::create_data_sink(ObjectPool* pool, const TDataSink& thrift_sink if (!thrift_sink.__isset.stream_sink) { return Status::InternalError("Missing data stream sink."); } - bool send_query_statistics_with_every_batch = - params.__isset.send_query_statistics_with_every_batch - ? params.send_query_statistics_with_every_batch - : false; // TODO: figure out good buffer size based on size of output row - *sink = std::make_unique( - state, pool, local_params.sender_id, row_desc, thrift_sink.stream_sink, - params.destinations, send_query_statistics_with_every_batch); + *sink = std::make_unique(state, pool, local_params.sender_id, + row_desc, thrift_sink.stream_sink, + params.destinations); // RETURN_IF_ERROR(sender->prepare(state->obj_pool(), thrift_sink.stream_sink)); break; } @@ -229,16 +216,11 @@ Status DataSink::create_data_sink(ObjectPool* pool, const TDataSink& thrift_sink } // TODO: figure out good buffer size based on size of output row - bool send_query_statistics_with_every_batch = - params.__isset.send_query_statistics_with_every_batch - ? params.send_query_statistics_with_every_batch - : false; // Result file sink is not the top sink if (params.__isset.destinations && params.destinations.size() > 0) { sink->reset(new doris::vectorized::VResultFileSink( state, pool, local_params.sender_id, row_desc, thrift_sink.result_file_sink, - params.destinations, send_query_statistics_with_every_batch, output_exprs, - desc_tbl)); + params.destinations, output_exprs, desc_tbl)); } else { sink->reset(new doris::vectorized::VResultFileSink(row_desc, output_exprs)); } diff --git a/be/src/exec/data_sink.h b/be/src/exec/data_sink.h index 6faef8f8a7800b7..3bf72ae545071a8 100644 --- a/be/src/exec/data_sink.h +++ b/be/src/exec/data_sink.h @@ -36,7 +36,6 @@ class ObjectPool; class RuntimeState; class TPlanFragmentExecParams; class DescriptorTbl; -class QueryStatistics; class TDataSink; class TExpr; class TPipelineFragmentParams; @@ -104,10 +103,6 @@ class DataSink { // Returns the runtime profile for the sink. 
RuntimeProfile* profile() { return _profile; } - virtual void set_query_statistics(std::shared_ptr statistics) { - _query_statistics = statistics; - } - const RowDescriptor& row_desc() { return _row_desc; } virtual bool can_write() { return true; } @@ -124,9 +119,6 @@ class DataSink { RuntimeProfile* _profile = nullptr; // Allocated from _pool - // Maybe this will be transferred to BufferControlBlock. - std::shared_ptr _query_statistics; - RuntimeProfile::Counter* _exec_timer = nullptr; RuntimeProfile::Counter* _blocks_sent_counter = nullptr; RuntimeProfile::Counter* _output_rows_counter = nullptr; diff --git a/be/src/exec/exec_node.cpp b/be/src/exec/exec_node.cpp index 7d38ee5e651611c..6bc6e07c5631c6c 100644 --- a/be/src/exec/exec_node.cpp +++ b/be/src/exec/exec_node.cpp @@ -74,7 +74,6 @@ #include "vec/utils/util.hpp" namespace doris { -class QueryStatistics; const std::string ExecNode::ROW_THROUGHPUT_COUNTER = "RowsProducedRate"; @@ -96,6 +95,7 @@ ExecNode::ExecNode(ObjectPool* pool, const TPlanNode& tnode, const DescriptorTbl if (tnode.__isset.output_tuple_id) { _output_row_descriptor.reset(new RowDescriptor(descs, {tnode.output_tuple_id}, {true})); } + _query_statistics = std::make_shared(); } ExecNode::~ExecNode() = default; @@ -176,22 +176,6 @@ Status ExecNode::reset(RuntimeState* state) { return Status::OK(); } -Status ExecNode::collect_query_statistics(QueryStatistics* statistics) { - DCHECK(statistics != nullptr); - for (auto child_node : _children) { - RETURN_IF_ERROR(child_node->collect_query_statistics(statistics)); - } - return Status::OK(); -} - -Status ExecNode::collect_query_statistics(QueryStatistics* statistics, int sender_id) { - DCHECK(statistics != nullptr); - for (auto child_node : _children) { - RETURN_IF_ERROR(child_node->collect_query_statistics(statistics, sender_id)); - } - return Status::OK(); -} - void ExecNode::release_resource(doris::RuntimeState* state) { if (!_is_resource_released) { if (_rows_returned_counter != nullptr) { @@ -276,6 +260,9 @@ Status ExecNode::create_tree_helper(RuntimeState* state, ObjectPool* pool, // Step 1 Create current ExecNode according to current thrift plan node. ExecNode* cur_exec_node = nullptr; RETURN_IF_ERROR(create_node(state, pool, cur_plan_node, descs, &cur_exec_node)); + if (cur_exec_node != nullptr) { + state->get_query_ctx()->register_query_statistics(cur_exec_node->get_query_statistics()); + } // Step 1.1 // Record current node if we have parent or record myself as root node. diff --git a/be/src/exec/exec_node.h b/be/src/exec/exec_node.h index aaa2f6ee07f20d7..f4b49cba6f56fe6 100644 --- a/be/src/exec/exec_node.h +++ b/be/src/exec/exec_node.h @@ -156,13 +156,6 @@ class ExecNode { // so should be fast. [[nodiscard]] virtual Status reset(RuntimeState* state); - // This should be called before close() and after get_next(), it is responsible for - // collecting statistics sent with row batch, it can't be called when prepare() returns - // error. - [[nodiscard]] virtual Status collect_query_statistics(QueryStatistics* statistics); - - [[nodiscard]] virtual Status collect_query_statistics(QueryStatistics* statistics, - int sender_id); // close() will get called for every exec node, regardless of what else is called and // the status of these calls (i.e. prepare() may never have been called, or // prepare()/open()/get_next() returned with an error). @@ -243,6 +236,8 @@ class ExecNode { // such as send the last buffer to remote. 
virtual Status try_close(RuntimeState* state) { return Status::OK(); } + std::shared_ptr get_query_statistics() { return _query_statistics; } + protected: friend class DataSink; @@ -330,6 +325,8 @@ class ExecNode { std::atomic _can_read = false; + std::shared_ptr _query_statistics = nullptr; + private: static Status create_tree_helper(RuntimeState* state, ObjectPool* pool, const std::vector& tnodes, diff --git a/be/src/exprs/bloom_filter_func.h b/be/src/exprs/bloom_filter_func.h index 3c60ccc89c7731e..71dc3f6e663ab1c 100644 --- a/be/src/exprs/bloom_filter_func.h +++ b/be/src/exprs/bloom_filter_func.h @@ -312,13 +312,22 @@ struct CommonFindOp { void find_batch(const BloomFilterAdaptor& bloom_filter, const vectorized::ColumnPtr& column, uint8_t* results) const { + const T* __restrict data = nullptr; + const uint8_t* __restrict nullmap = nullptr; if (column->is_nullable()) { const auto* nullable = assert_cast(column.get()); - const auto& nullmap = - assert_cast(nullable->get_null_map_column()) - .get_data(); + if (nullable->has_null()) { + nullmap = + assert_cast(nullable->get_null_map_column()) + .get_data() + .data(); + } + data = (T*)nullable->get_nested_column().get_raw_data().data; + } else { + data = (T*)column->get_raw_data().data; + } - const T* data = (T*)nullable->get_nested_column().get_raw_data().data; + if (nullmap) { for (size_t i = 0; i < column->size(); i++) { if (!nullmap[i]) { results[i] = bloom_filter.test_element(data[i]); @@ -327,7 +336,6 @@ struct CommonFindOp { } } } else { - const T* data = (T*)column->get_raw_data().data; for (size_t i = 0; i < column->size(); i++) { results[i] = bloom_filter.test_element(data[i]); } @@ -340,8 +348,8 @@ struct CommonFindOp { }; struct StringFindOp : CommonFindOp { - void insert_batch(BloomFilterAdaptor& bloom_filter, const vectorized::ColumnPtr& column, - size_t start) { + static void insert_batch(BloomFilterAdaptor& bloom_filter, const vectorized::ColumnPtr& column, + size_t start) { if (column->is_nullable()) { const auto* nullable = assert_cast(column.get()); const auto& col = @@ -363,8 +371,8 @@ struct StringFindOp : CommonFindOp { } } - void find_batch(const BloomFilterAdaptor& bloom_filter, const vectorized::ColumnPtr& column, - uint8_t* results) { + static void find_batch(const BloomFilterAdaptor& bloom_filter, + const vectorized::ColumnPtr& column, uint8_t* results) { if (column->is_nullable()) { const auto* nullable = assert_cast(column.get()); const auto& col = @@ -372,12 +380,17 @@ struct StringFindOp : CommonFindOp { const auto& nullmap = assert_cast(nullable->get_null_map_column()) .get_data(); - - for (size_t i = 0; i < column->size(); i++) { - if (!nullmap[i]) { + if (nullable->has_null()) { + for (size_t i = 0; i < column->size(); i++) { + if (!nullmap[i]) { + results[i] = bloom_filter.test_element(col.get_data_at(i)); + } else { + results[i] = false; + } + } + } else { + for (size_t i = 0; i < column->size(); i++) { results[i] = bloom_filter.test_element(col.get_data_at(i)); - } else { - results[i] = false; } } } else { @@ -392,9 +405,9 @@ struct StringFindOp : CommonFindOp { // We do not need to judge whether data is empty, because null will not appear // when filer used by the storage engine struct FixedStringFindOp : public StringFindOp { - uint16_t find_batch_olap_engine(const BloomFilterAdaptor& bloom_filter, const char* data, - const uint8* nullmap, uint16_t* offsets, int number, - const bool is_parse_column) { + static uint16_t find_batch_olap_engine(const BloomFilterAdaptor& bloom_filter, const char* data, + 
const uint8* nullmap, uint16_t* offsets, int number, + const bool is_parse_column) { return find_batch_olap(bloom_filter, data, nullmap, offsets, number, is_parse_column); } diff --git a/be/src/exprs/runtime_filter_slots.h b/be/src/exprs/runtime_filter_slots.h index 495ac28e762d8ed..4859734a6a4921b 100644 --- a/be/src/exprs/runtime_filter_slots.h +++ b/be/src/exprs/runtime_filter_slots.h @@ -57,7 +57,7 @@ class VRuntimeFilterSlots { throw Exception(ErrorCode::INTERNAL_ERROR, "filters empty, filter_id={}", filter_id); } - for (auto filter : filters) { + for (auto* filter : filters) { filter->set_ignored(""); filter->signal(); } @@ -166,7 +166,7 @@ class VRuntimeFilterSlots { return Status::OK(); } - void insert(const std::unordered_set& datas) { + void insert(const vectorized::Block* block) { for (int i = 0; i < _build_expr_context.size(); ++i) { auto iter = _runtime_filters.find(i); if (iter == _runtime_filters.end()) { @@ -174,18 +174,16 @@ class VRuntimeFilterSlots { } int result_column_id = _build_expr_context[i]->get_last_result_column_id(); - for (const auto* it : datas) { - auto column = it->get_by_position(result_column_id).column; - for (auto* filter : iter->second) { - filter->insert_batch(column, 1); - } + const auto& column = block->get_by_position(result_column_id).column; + for (auto* filter : iter->second) { + filter->insert_batch(column, 1); } } } bool ready_finish_publish() { for (auto& pair : _runtime_filters) { - for (auto filter : pair.second) { + for (auto* filter : pair.second) { if (!filter->is_finish_rpc()) { return false; } @@ -196,7 +194,7 @@ class VRuntimeFilterSlots { void finish_publish() { for (auto& pair : _runtime_filters) { - for (auto filter : pair.second) { + for (auto* filter : pair.second) { static_cast(filter->join_rpc()); } } diff --git a/be/src/olap/wal/wal_manager.cpp b/be/src/olap/wal/wal_manager.cpp index b1931f62a628e45..621a1aa080607bc 100644 --- a/be/src/olap/wal/wal_manager.cpp +++ b/be/src/olap/wal/wal_manager.cpp @@ -352,8 +352,10 @@ Status WalManager::add_recover_wal(int64_t db_id, int64_t table_id, int64_t wal_ } table_ptr->add_wal(wal_id, wal); #ifndef BE_TEST - RETURN_IF_ERROR(update_wal_dir_limit(_get_base_wal_path(wal))); - RETURN_IF_ERROR(update_wal_dir_used(_get_base_wal_path(wal))); + WARN_IF_ERROR(update_wal_dir_limit(_get_base_wal_path(wal)), + "Failed to update wal dir limit while add recover wal!"); + WARN_IF_ERROR(update_wal_dir_used(_get_base_wal_path(wal)), + "Failed to update wal dir used while add recove wal!"); #endif return Status::OK(); } diff --git a/be/src/pipeline/exec/exchange_sink_buffer.cpp b/be/src/pipeline/exec/exchange_sink_buffer.cpp index 1f579b6a971054c..4484a34375b5398 100644 --- a/be/src/pipeline/exec/exchange_sink_buffer.cpp +++ b/be/src/pipeline/exec/exchange_sink_buffer.cpp @@ -106,7 +106,7 @@ void ExchangeSinkBuffer::close() { template bool ExchangeSinkBuffer::can_write() const { - size_t max_package_size = 64 * _instance_to_package_queue.size(); + size_t max_package_size = QUEUE_CAPACITY_FACTOR * _instance_to_package_queue.size(); size_t total_package_size = 0; for (auto& [_, q] : _instance_to_package_queue) { total_package_size += q.size(); @@ -255,10 +255,6 @@ Status ExchangeSinkBuffer::_send_rpc(InstanceLoId id) { auto& brpc_request = _instance_to_request[id]; brpc_request->set_eos(request.eos); brpc_request->set_packet_seq(_instance_to_seq[id]++); - if (_statistics && _statistics->collected()) { - auto statistic = brpc_request->mutable_query_statistics(); - _statistics->to_pb(statistic); - } if 
(request.block) { brpc_request->set_allocated_block(request.block.get()); } @@ -325,10 +321,6 @@ Status ExchangeSinkBuffer::_send_rpc(InstanceLoId id) { if (request.block_holder->get_block()) { brpc_request->set_allocated_block(request.block_holder->get_block()); } - if (_statistics && _statistics->collected()) { - auto statistic = brpc_request->mutable_query_statistics(); - _statistics->to_pb(statistic); - } auto send_callback = request.channel->get_send_callback(id, request.eos); ExchangeRpcContext rpc_ctx; diff --git a/be/src/pipeline/exec/exchange_sink_buffer.h b/be/src/pipeline/exec/exchange_sink_buffer.h index 83b20f9c8a808e3..a11b637f4d4df2f 100644 --- a/be/src/pipeline/exec/exchange_sink_buffer.h +++ b/be/src/pipeline/exec/exchange_sink_buffer.h @@ -215,7 +215,6 @@ class ExchangeSinkBuffer { _queue_dependency = queue_dependency; _finish_dependency = finish_dependency; } - void set_query_statistics(QueryStatistics* statistics) { _statistics = statistics; } void set_should_stop() { _should_stop = true; diff --git a/be/src/pipeline/exec/exchange_sink_operator.cpp b/be/src/pipeline/exec/exchange_sink_operator.cpp index 43bec0bd92d2e53..aa29bb4cf0f935b 100644 --- a/be/src/pipeline/exec/exchange_sink_operator.cpp +++ b/be/src/pipeline/exec/exchange_sink_operator.cpp @@ -67,7 +67,6 @@ Status ExchangeSinkOperator::prepare(RuntimeState* state) { _sink_buffer = std::make_unique>( id, _dest_node_id, _sink->_sender_id, _state->be_number(), state->get_query_ctx()); - _sink_buffer->set_query_statistics(_sink->query_statistics()); RETURN_IF_ERROR(DataSinkOperator::prepare(state)); _sink->register_pipeline_channels(_sink_buffer.get()); return Status::OK(); @@ -135,14 +134,12 @@ Status ExchangeSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& inf std::map fragment_id_to_channel_index; for (int i = 0; i < p._dests.size(); ++i) { - // Select first dest as transfer chain. 
- bool is_transfer_chain = (i == 0); const auto& fragment_instance_id = p._dests[i].fragment_instance_id; if (fragment_id_to_channel_index.find(fragment_instance_id.lo) == fragment_id_to_channel_index.end()) { channel_shared_ptrs.emplace_back(new vectorized::PipChannel( this, p._row_desc, p._dests[i].brpc_server, fragment_instance_id, - p._dest_node_id, is_transfer_chain, p._send_query_statistics_with_every_batch)); + p._dest_node_id)); fragment_id_to_channel_index.emplace(fragment_instance_id.lo, channel_shared_ptrs.size() - 1); channels.push_back(channel_shared_ptrs.back().get()); @@ -258,14 +255,12 @@ segment_v2::CompressionTypePB ExchangeSinkLocalState::compression_type() const { ExchangeSinkOperatorX::ExchangeSinkOperatorX( RuntimeState* state, const RowDescriptor& row_desc, int operator_id, - const TDataStreamSink& sink, const std::vector& destinations, - bool send_query_statistics_with_every_batch) + const TDataStreamSink& sink, const std::vector& destinations) : DataSinkOperatorX(operator_id, sink.dest_node_id), _texprs(sink.output_partition.partition_exprs), _row_desc(row_desc), _part_type(sink.output_partition.type), _dests(destinations), - _send_query_statistics_with_every_batch(send_query_statistics_with_every_batch), _dest_node_id(sink.dest_node_id), _transfer_large_data_by_brpc(config::transfer_large_data_by_brpc) { DCHECK_GT(destinations.size(), 0); diff --git a/be/src/pipeline/exec/exchange_sink_operator.h b/be/src/pipeline/exec/exchange_sink_operator.h index 24fe9e1d84c4406..6d1d1b6a4feaa65 100644 --- a/be/src/pipeline/exec/exchange_sink_operator.h +++ b/be/src/pipeline/exec/exchange_sink_operator.h @@ -203,8 +203,7 @@ class ExchangeSinkOperatorX final : public DataSinkOperatorX& destinations, - bool send_query_statistics_with_every_batch); + const std::vector& destinations); Status init(const TDataSink& tsink) override; RuntimeState* state() { return _state; } @@ -244,7 +243,6 @@ class ExchangeSinkOperatorX final : public DataSinkOperatorX _dests; - const bool _send_query_statistics_with_every_batch; std::unique_ptr _mem_tracker; // Identifier of the destination plan node. 
diff --git a/be/src/pipeline/exec/exchange_source_operator.cpp b/be/src/pipeline/exec/exchange_source_operator.cpp index 255cb151410fda1..73aa7a22e819695 100644 --- a/be/src/pipeline/exec/exchange_source_operator.cpp +++ b/be/src/pipeline/exec/exchange_source_operator.cpp @@ -73,7 +73,7 @@ Status ExchangeLocalState::init(RuntimeState* state, LocalStateInfo& info) { auto& p = _parent->cast(); stream_recvr = state->exec_env()->vstream_mgr()->create_recvr( state, p.input_row_desc(), state->fragment_instance_id(), p.node_id(), p.num_senders(), - profile(), p.is_merging(), p.sub_plan_query_statistics_recvr()); + profile(), p.is_merging()); auto* source_dependency = _dependency; const auto& queues = stream_recvr->sender_queues(); deps.resize(queues.size()); @@ -133,7 +133,6 @@ Status ExchangeSourceOperatorX::init(const TPlanNode& tnode, RuntimeState* state Status ExchangeSourceOperatorX::prepare(RuntimeState* state) { RETURN_IF_ERROR(OperatorX::prepare(state)); DCHECK_GT(_num_senders, 0); - _sub_plan_query_statistics_recvr.reset(new QueryStatisticsRecvr()); if (_is_merging) { RETURN_IF_ERROR(_vsort_exec_exprs.prepare(state, _row_descriptor, _row_descriptor)); diff --git a/be/src/pipeline/exec/exchange_source_operator.h b/be/src/pipeline/exec/exchange_source_operator.h index bbccfe52987ad54..3340691f7eeb9f4 100644 --- a/be/src/pipeline/exec/exchange_source_operator.h +++ b/be/src/pipeline/exec/exchange_source_operator.h @@ -113,10 +113,6 @@ class ExchangeSourceOperatorX final : public OperatorX { [[nodiscard]] int num_senders() const { return _num_senders; } [[nodiscard]] bool is_merging() const { return _is_merging; } - std::shared_ptr sub_plan_query_statistics_recvr() { - return _sub_plan_query_statistics_recvr; - } - DataDistribution required_data_distribution() const override { if (OperatorX::ignore_data_distribution()) { return {ExchangeType::NOOP}; @@ -134,7 +130,6 @@ class ExchangeSourceOperatorX final : public OperatorX { const bool _is_merging; const TPartitionType::type _partition_type; RowDescriptor _input_row_desc; - std::shared_ptr _sub_plan_query_statistics_recvr; // use in merge sort size_t _offset; diff --git a/be/src/pipeline/exec/hashjoin_build_sink.cpp b/be/src/pipeline/exec/hashjoin_build_sink.cpp index 757673c70a8cabc..f02e203c7832a55 100644 --- a/be/src/pipeline/exec/hashjoin_build_sink.cpp +++ b/be/src/pipeline/exec/hashjoin_build_sink.cpp @@ -484,29 +484,13 @@ Status HashJoinBuildSinkOperatorX::sink(RuntimeState* state, vectorized::Block* local_state._build_side_mutable_block.to_block()); COUNTER_UPDATE(local_state._build_blocks_memory_usage, (*local_state._shared_state->build_block).bytes()); - RETURN_IF_ERROR( - local_state.process_build_block(state, (*local_state._shared_state->build_block))); const bool use_global_rf = local_state._parent->cast()._use_global_rf; - auto ret = std::visit( - Overload {[&](std::monostate&) -> Status { - LOG(FATAL) << "FATAL: uninited hash table"; - __builtin_unreachable(); - }, - [&](auto&& arg) -> Status { - vectorized::ProcessRuntimeFilterBuild runtime_filter_build_process; - return runtime_filter_build_process(state, arg, &local_state, - use_global_rf); - }}, - *local_state._shared_state->hash_table_variants); - if (!ret.ok()) { - if (_shared_hashtable_controller) { - _shared_hash_table_context->status = ret; - _shared_hashtable_controller->signal(node_id()); - } - return ret; - } + RETURN_IF_ERROR(vectorized::process_runtime_filter_build( + state, local_state._shared_state->build_block.get(), &local_state, use_global_rf)); + 
RETURN_IF_ERROR( + local_state.process_build_block(state, (*local_state._shared_state->build_block))); if (_shared_hashtable_controller) { _shared_hash_table_context->status = Status::OK(); // arena will be shared with other instances. diff --git a/be/src/pipeline/exec/hashjoin_build_sink.h b/be/src/pipeline/exec/hashjoin_build_sink.h index 5ea504d488daf92..3c1b772b30abff4 100644 --- a/be/src/pipeline/exec/hashjoin_build_sink.h +++ b/be/src/pipeline/exec/hashjoin_build_sink.h @@ -94,7 +94,10 @@ class HashJoinBuildSinkLocalState final friend class HashJoinBuildSinkOperatorX; template friend struct vectorized::ProcessHashTableBuild; - friend struct vectorized::ProcessRuntimeFilterBuild; + template + friend Status vectorized::process_runtime_filter_build(RuntimeState* state, + vectorized::Block* block, Parent* parent, + bool is_global); // build expr vectorized::VExprContextSPtrs _build_expr_ctxs; @@ -107,7 +110,6 @@ class HashJoinBuildSinkLocalState final std::shared_ptr _runtime_filter_slots; bool _has_set_need_null_map_for_build = false; bool _build_side_ignore_null = false; - std::unordered_set _inserted_blocks; std::shared_ptr _shared_hash_table_dependency; std::vector _build_col_ids; diff --git a/be/src/pipeline/exec/operator.h b/be/src/pipeline/exec/operator.h index bb0b8f30911d64d..80184374b77eaf0 100644 --- a/be/src/pipeline/exec/operator.h +++ b/be/src/pipeline/exec/operator.h @@ -174,14 +174,6 @@ class OperatorBase { virtual bool is_source() const; - virtual Status collect_query_statistics(QueryStatistics* statistics) { return Status::OK(); }; - - virtual Status collect_query_statistics(QueryStatistics* statistics, int sender_id) { - return Status::OK(); - }; - - virtual void set_query_statistics(std::shared_ptr) {}; - virtual Status init(const TDataSink& tsink) { return Status::OK(); } // Prepare for running. (e.g. resource allocation, etc.) 
@@ -317,9 +309,6 @@ class DataSinkOperator : public OperatorBase { } [[nodiscard]] RuntimeProfile* get_runtime_profile() const override { return _sink->profile(); } - void set_query_statistics(std::shared_ptr statistics) override { - _sink->set_query_statistics(statistics); - } protected: DataSinkType* _sink = nullptr; @@ -385,16 +374,6 @@ class StreamingOperator : public OperatorBase { return _node->runtime_profile(); } - Status collect_query_statistics(QueryStatistics* statistics) override { - RETURN_IF_ERROR(_node->collect_query_statistics(statistics)); - return Status::OK(); - } - - Status collect_query_statistics(QueryStatistics* statistics, int sender_id) override { - RETURN_IF_ERROR(_node->collect_query_statistics(statistics, sender_id)); - return Status::OK(); - } - protected: StreamingNodeType* _node = nullptr; bool _use_projection; diff --git a/be/src/pipeline/exec/result_file_sink_operator.cpp b/be/src/pipeline/exec/result_file_sink_operator.cpp index 11045c5f06935c0..2b095748b15e813 100644 --- a/be/src/pipeline/exec/result_file_sink_operator.cpp +++ b/be/src/pipeline/exec/result_file_sink_operator.cpp @@ -59,13 +59,11 @@ ResultFileSinkOperatorX::ResultFileSinkOperatorX(int operator_id, const RowDescr ResultFileSinkOperatorX::ResultFileSinkOperatorX( int operator_id, const RowDescriptor& row_desc, const TResultFileSink& sink, const std::vector& destinations, - bool send_query_statistics_with_every_batch, const std::vector& t_output_expr, - DescriptorTbl& descs) + const std::vector& t_output_expr, DescriptorTbl& descs) : DataSinkOperatorX(operator_id, 0), _row_desc(row_desc), _t_output_expr(t_output_expr), _dests(destinations), - _send_query_statistics_with_every_batch(send_query_statistics_with_every_batch), _output_row_descriptor(descs.get_tuple_descriptor(sink.output_tuple_id), false), _is_top_sink(false) { CHECK_EQ(destinations.size(), 1); @@ -134,10 +132,9 @@ Status ResultFileSinkLocalState::init(RuntimeState* state, LocalSinkStateInfo& i std::map fragment_id_to_channel_index; for (int i = 0; i < p._dests.size(); ++i) { - _channels.push_back(new vectorized::Channel( - this, p._row_desc, p._dests[i].brpc_server, state->fragment_instance_id(), - info.tsink.result_file_sink.dest_node_id, false, - p._send_query_statistics_with_every_batch)); + _channels.push_back(new vectorized::Channel(this, p._row_desc, p._dests[i].brpc_server, + state->fragment_instance_id(), + info.tsink.result_file_sink.dest_node_id)); } std::random_device rd; std::mt19937 g(rd()); @@ -187,7 +184,7 @@ Status ResultFileSinkLocalState::close(RuntimeState* state, Status exec_status) if (p._is_top_sink) { // close sender, this is normal path end if (_sender) { - _sender->update_num_written_rows(_writer == nullptr ? 0 : _writer->get_written_rows()); + _sender->update_return_rows(_writer == nullptr ? 
0 : _writer->get_written_rows()); static_cast(_sender->close(final_status)); } static_cast(state->exec_env()->result_mgr()->cancel_at_time( diff --git a/be/src/pipeline/exec/result_file_sink_operator.h b/be/src/pipeline/exec/result_file_sink_operator.h index f064b8f2b7f9228..57e1e8c9147acd1 100644 --- a/be/src/pipeline/exec/result_file_sink_operator.h +++ b/be/src/pipeline/exec/result_file_sink_operator.h @@ -94,7 +94,6 @@ class ResultFileSinkOperatorX final : public DataSinkOperatorX& destinations, - bool send_query_statistics_with_every_batch, const std::vector& t_output_expr, DescriptorTbl& descs); Status init(const TDataSink& thrift_sink) override; @@ -113,7 +112,6 @@ class ResultFileSinkOperatorX final : public DataSinkOperatorX& _t_output_expr; const std::vector _dests; - bool _send_query_statistics_with_every_batch; // set file options when sink type is FILE std::unique_ptr _file_opts; diff --git a/be/src/pipeline/exec/result_sink_operator.cpp b/be/src/pipeline/exec/result_sink_operator.cpp index 8dc6eed299899ff..8b3afb1908f306f 100644 --- a/be/src/pipeline/exec/result_sink_operator.cpp +++ b/be/src/pipeline/exec/result_sink_operator.cpp @@ -180,9 +180,8 @@ Status ResultSinkLocalState::close(RuntimeState* state, Status exec_status) { // close sender, this is normal path end if (_sender) { if (_writer) { - _sender->update_num_written_rows(_writer->get_written_rows()); + _sender->update_return_rows(_writer->get_written_rows()); } - _sender->update_max_peak_memory_bytes(); static_cast(_sender->close(final_status)); } static_cast(state->exec_env()->result_mgr()->cancel_at_time( diff --git a/be/src/pipeline/exec/scan_operator.cpp b/be/src/pipeline/exec/scan_operator.cpp index 851e0ad3250ab78..c92b2cbfcbfe99e 100644 --- a/be/src/pipeline/exec/scan_operator.cpp +++ b/be/src/pipeline/exec/scan_operator.cpp @@ -141,13 +141,6 @@ Status ScanLocalState::init(RuntimeState* state, LocalStateInfo& info) // could add here, not in the _init_profile() function _prepare_rf_timer(_runtime_profile.get()); - static const std::string timer_name = "WaitForDependencyTime"; - _wait_for_dependency_timer = ADD_TIMER_WITH_LEVEL(_runtime_profile, timer_name, 1); - _wait_for_data_timer = - ADD_CHILD_TIMER_WITH_LEVEL(_runtime_profile, "WaitForData", timer_name, 1); - _wait_for_scanner_done_timer = - ADD_CHILD_TIMER_WITH_LEVEL(_runtime_profile, "WaitForScannerDone", timer_name, 1); - _wait_for_eos_timer = ADD_CHILD_TIMER_WITH_LEVEL(_runtime_profile, "WaitForEos", timer_name, 1); _wait_for_rf_timer = ADD_TIMER(_runtime_profile, "WaitForRuntimeFilter"); return Status::OK(); } @@ -1213,6 +1206,9 @@ Status ScanLocalState::_prepare_scanners() { _eos = true; _scan_dependency->set_ready(); } else { + for (auto& scanner : scanners) { + scanner->set_query_statistics(_query_statistics.get()); + } COUNTER_SET(_num_scanners, static_cast(scanners.size())); RETURN_IF_ERROR(_start_scanners(_scanners)); } diff --git a/be/src/pipeline/exec/scan_operator.h b/be/src/pipeline/exec/scan_operator.h index fa7e44a8fb78950..ec2d68e7649858f 100644 --- a/be/src/pipeline/exec/scan_operator.h +++ b/be/src/pipeline/exec/scan_operator.h @@ -169,10 +169,6 @@ class ScanLocalStateBase : public PipelineXLocalState, RuntimeProfile::Counter* _total_throughput_counter = nullptr; RuntimeProfile::Counter* _num_scanners = nullptr; - RuntimeProfile::Counter* _wait_for_data_timer = nullptr; - RuntimeProfile::Counter* _wait_for_scanner_done_timer = nullptr; - // time of prefilter input block from scanner - RuntimeProfile::Counter* _wait_for_eos_timer = 
nullptr; RuntimeProfile::Counter* _wait_for_rf_timer = nullptr; }; diff --git a/be/src/pipeline/pipeline.h b/be/src/pipeline/pipeline.h index ab6850b704bd132..148191f4e2dd0c8 100644 --- a/be/src/pipeline/pipeline.h +++ b/be/src/pipeline/pipeline.h @@ -115,12 +115,6 @@ class Pipeline : public std::enable_shared_from_this { [[nodiscard]] PipelineId id() const { return _pipeline_id; } void set_is_root_pipeline() { _is_root_pipeline = true; } bool is_root_pipeline() const { return _is_root_pipeline; } - void set_collect_query_statistics_with_every_batch() { - _collect_query_statistics_with_every_batch = true; - } - [[nodiscard]] bool collect_query_statistics_with_every_batch() const { - return _collect_query_statistics_with_every_batch; - } static bool is_hash_exchange(ExchangeType idx) { return idx == ExchangeType::HASH_SHUFFLE || idx == ExchangeType::BUCKET_HASH_SHUFFLE; @@ -235,7 +229,6 @@ class Pipeline : public std::enable_shared_from_this { bool _always_can_read = false; bool _always_can_write = false; bool _is_root_pipeline = false; - bool _collect_query_statistics_with_every_batch = false; // Input data distribution of this pipeline. We do local exchange when input data distribution // does not match the target data distribution. diff --git a/be/src/pipeline/pipeline_fragment_context.cpp b/be/src/pipeline/pipeline_fragment_context.cpp index 695fd6f4d3d25d4..b9c2382ce861629 100644 --- a/be/src/pipeline/pipeline_fragment_context.cpp +++ b/be/src/pipeline/pipeline_fragment_context.cpp @@ -342,7 +342,6 @@ Status PipelineFragmentContext::prepare(const doris::TPipelineFragmentParams& re _root_pipeline = fragment_context->add_pipeline(); _root_pipeline->set_is_root_pipeline(); - _root_pipeline->set_collect_query_statistics_with_every_batch(); RETURN_IF_ERROR(_build_pipelines(_root_plan, _root_pipeline)); if (_sink) { // DataSinkOperator is builded here @@ -854,7 +853,7 @@ Status PipelineFragmentContext::_create_sink(int sender_id, const TDataSink& thr _multi_cast_stream_sink_senders[i].reset(new vectorized::VDataStreamSender( _runtime_state.get(), _runtime_state->obj_pool(), sender_id, row_desc, thrift_sink.multi_cast_stream_sink.sinks[i], - thrift_sink.multi_cast_stream_sink.destinations[i], false)); + thrift_sink.multi_cast_stream_sink.destinations[i])); // 2. 
create and set the source operator of multi_cast_data_stream_source for new pipeline OperatorBuilderPtr source_op = @@ -945,7 +944,7 @@ Status PipelineFragmentContext::send_report(bool done) { std::bind(&PipelineFragmentContext::update_status, this, std::placeholders::_1), std::bind(&PipelineFragmentContext::cancel, this, std::placeholders::_1, std::placeholders::_2), - _dml_query_statistics()}, + _query_ctx->get_query_statistics()}, std::dynamic_pointer_cast(shared_from_this())); } diff --git a/be/src/pipeline/pipeline_fragment_context.h b/be/src/pipeline/pipeline_fragment_context.h index 2a3a11d59cc1078..353e7a0658685e2 100644 --- a/be/src/pipeline/pipeline_fragment_context.h +++ b/be/src/pipeline/pipeline_fragment_context.h @@ -150,10 +150,6 @@ class PipelineFragmentContext : public TaskExecutionContext { uint64_t create_time() const { return _create_time; } - void set_query_statistics(std::shared_ptr query_statistics) { - _query_statistics = query_statistics; - } - protected: Status _create_sink(int sender_id, const TDataSink& t_data_sink, RuntimeState* state); Status _build_pipelines(ExecNode*, PipelinePtr); @@ -230,17 +226,9 @@ class PipelineFragmentContext : public TaskExecutionContext { private: static bool _has_inverted_index_or_partial_update(TOlapTableSink sink); - std::shared_ptr _dml_query_statistics() { - if (_query_statistics && _query_statistics->collect_dml_statistics()) { - return _query_statistics; - } - return nullptr; - } std::vector> _tasks; uint64_t _create_time; - - std::shared_ptr _query_statistics = nullptr; }; } // namespace pipeline } // namespace doris \ No newline at end of file diff --git a/be/src/pipeline/pipeline_task.cpp b/be/src/pipeline/pipeline_task.cpp index 5f5ff56aa4bfa75..32f57c299862ac6 100644 --- a/be/src/pipeline/pipeline_task.cpp +++ b/be/src/pipeline/pipeline_task.cpp @@ -58,11 +58,6 @@ PipelineTask::PipelineTask(PipelinePtr& pipeline, uint32_t index, RuntimeState* _root(_operators.back()), _sink(sink) { _pipeline_task_watcher.start(); - _query_statistics.reset(new QueryStatistics(state->query_options().query_type)); - _sink->set_query_statistics(_query_statistics); - _collect_query_statistics_with_every_batch = - _pipeline->collect_query_statistics_with_every_batch(); - fragment_context->set_query_statistics(_query_statistics); } PipelineTask::PipelineTask(PipelinePtr& pipeline, uint32_t index, RuntimeState* state, @@ -285,10 +280,6 @@ Status PipelineTask::execute(bool* eos) { if (_block->rows() != 0 || *eos) { SCOPED_TIMER(_sink_timer); - if (_data_state == SourceState::FINISHED || - _collect_query_statistics_with_every_batch) { - RETURN_IF_ERROR(_collect_query_statistics()); - } status = _sink->sink(_state, block, _data_state); if (!status.is()) { RETURN_IF_ERROR(status); @@ -303,23 +294,6 @@ Status PipelineTask::execute(bool* eos) { return Status::OK(); } -Status PipelineTask::_collect_query_statistics() { - // The execnode tree of a fragment will be split into multiple pipelines, we only need to collect the root pipeline. - if (_pipeline->is_root_pipeline()) { - // If the current fragment has only one instance, we can collect all of them; - // otherwise, we need to collect them based on the sender_id. 
- if (_state->num_per_fragment_instances() == 1) { - _query_statistics->clear(); - RETURN_IF_ERROR(_root->collect_query_statistics(_query_statistics.get())); - } else { - _query_statistics->clear(); - RETURN_IF_ERROR(_root->collect_query_statistics(_query_statistics.get(), - _state->per_fragment_instance_idx())); - } - } - return Status::OK(); -} - Status PipelineTask::try_close(Status exec_status) { if (_try_close_flag) { return Status::OK(); diff --git a/be/src/pipeline/pipeline_task.h b/be/src/pipeline/pipeline_task.h index 85951bdb06a35a8..56e42370ff2c0c0 100644 --- a/be/src/pipeline/pipeline_task.h +++ b/be/src/pipeline/pipeline_task.h @@ -389,9 +389,6 @@ class PipelineTask { int64_t _close_pipeline_time = 0; RuntimeProfile::Counter* _pip_task_total_timer = nullptr; - std::shared_ptr _query_statistics; - Status _collect_query_statistics(); - bool _collect_query_statistics_with_every_batch = false; private: Operators _operators; // left is _source, right is _root diff --git a/be/src/pipeline/pipeline_x/operator.cpp b/be/src/pipeline/pipeline_x/operator.cpp index bc16d70b86ca669..e38e7b39d95b1b1 100644 --- a/be/src/pipeline/pipeline_x/operator.cpp +++ b/be/src/pipeline/pipeline_x/operator.cpp @@ -316,7 +316,9 @@ PipelineXLocalStateBase::PipelineXLocalStateBase(RuntimeState* state, OperatorXB _rows_returned_counter(nullptr), _peak_memory_usage_counter(nullptr), _parent(parent), - _state(state) {} + _state(state) { + _query_statistics = std::make_shared(); +} template Status PipelineXLocalState::init(RuntimeState* state, LocalStateInfo& info) { diff --git a/be/src/pipeline/pipeline_x/operator.h b/be/src/pipeline/pipeline_x/operator.h index ec2500acf3323f0..5304d0074f6b222 100644 --- a/be/src/pipeline/pipeline_x/operator.h +++ b/be/src/pipeline/pipeline_x/operator.h @@ -107,6 +107,8 @@ class PipelineXLocalStateBase { // override in Scan MultiCastSink virtual RuntimeFilterDependency* filterdependency() { return nullptr; } + std::shared_ptr query_statistics_ptr() { return _query_statistics; } + protected: friend class OperatorXBase; @@ -119,6 +121,8 @@ class PipelineXLocalStateBase { // which will providea reference for operator memory. std::unique_ptr _mem_tracker; + std::shared_ptr _query_statistics = nullptr; + RuntimeProfile::Counter* _rows_returned_counter = nullptr; RuntimeProfile::Counter* _blocks_returned_counter = nullptr; RuntimeProfile::Counter* _wait_for_dependency_timer = nullptr; @@ -401,7 +405,6 @@ class PipelineXSinkLocalStateBase { RuntimeState* state() { return _state; } RuntimeProfile* profile() { return _profile; } MemTracker* mem_tracker() { return _mem_tracker.get(); } - QueryStatistics* query_statistics() { return _query_statistics.get(); } [[nodiscard]] RuntimeProfile* faker_runtime_profile() const { return _faker_runtime_profile.get(); } @@ -418,8 +421,6 @@ class PipelineXSinkLocalStateBase { RuntimeState* _state = nullptr; RuntimeProfile* _profile = nullptr; std::unique_ptr _mem_tracker; - // Maybe this will be transferred to BufferControlBlock. - std::shared_ptr _query_statistics; // Set to true after close() has been called. subclasses should check and set this in // close(). 
bool _closed = false; diff --git a/be/src/pipeline/pipeline_x/pipeline_x_fragment_context.cpp b/be/src/pipeline/pipeline_x/pipeline_x_fragment_context.cpp index 6b1d9af512b82e1..4cc00312697de26 100644 --- a/be/src/pipeline/pipeline_x/pipeline_x_fragment_context.cpp +++ b/be/src/pipeline/pipeline_x/pipeline_x_fragment_context.cpp @@ -327,13 +327,8 @@ Status PipelineXFragmentContext::_create_data_sink(ObjectPool* pool, const TData if (!thrift_sink.__isset.stream_sink) { return Status::InternalError("Missing data stream sink."); } - bool send_query_statistics_with_every_batch = - params.__isset.send_query_statistics_with_every_batch - ? params.send_query_statistics_with_every_batch - : false; _sink.reset(new ExchangeSinkOperatorX(state, row_desc, next_sink_operator_id(), - thrift_sink.stream_sink, params.destinations, - send_query_statistics_with_every_batch)); + thrift_sink.stream_sink, params.destinations)); break; } case TDataSinkType::RESULT_SINK: { @@ -377,16 +372,11 @@ Status PipelineXFragmentContext::_create_data_sink(ObjectPool* pool, const TData } // TODO: figure out good buffer size based on size of output row - bool send_query_statistics_with_every_batch = - params.__isset.send_query_statistics_with_every_batch - ? params.send_query_statistics_with_every_batch - : false; // Result file sink is not the top sink if (params.__isset.destinations && params.destinations.size() > 0) { - _sink.reset(new ResultFileSinkOperatorX( - next_sink_operator_id(), row_desc, thrift_sink.result_file_sink, - params.destinations, send_query_statistics_with_every_batch, output_exprs, - desc_tbl)); + _sink.reset(new ResultFileSinkOperatorX(next_sink_operator_id(), row_desc, + thrift_sink.result_file_sink, + params.destinations, output_exprs, desc_tbl)); } else { _sink.reset( new ResultFileSinkOperatorX(next_sink_operator_id(), row_desc, output_exprs)); @@ -431,10 +421,10 @@ Status PipelineXFragmentContext::_create_data_sink(ObjectPool* pool, const TData // 2. 
create and set sink operator of data stream sender for new pipeline DataSinkOperatorXPtr sink_op; - sink_op.reset(new ExchangeSinkOperatorX( - state, *_row_desc, next_sink_operator_id(), - thrift_sink.multi_cast_stream_sink.sinks[i], - thrift_sink.multi_cast_stream_sink.destinations[i], false)); + sink_op.reset( + new ExchangeSinkOperatorX(state, *_row_desc, next_sink_operator_id(), + thrift_sink.multi_cast_stream_sink.sinks[i], + thrift_sink.multi_cast_stream_sink.destinations[i])); static_cast(new_pipeline->set_sink(sink_op)); { @@ -605,7 +595,8 @@ Status PipelineXFragmentContext::_build_pipeline_tasks( auto prepare_and_set_parent_profile = [&](PipelineXTask* task, size_t pip_idx) { DCHECK(pipeline_id_to_profile[pip_idx]); - RETURN_IF_ERROR(task->prepare(local_params, request.fragment.output_sink)); + RETURN_IF_ERROR( + task->prepare(local_params, request.fragment.output_sink, _query_ctx.get())); return Status::OK(); }; diff --git a/be/src/pipeline/pipeline_x/pipeline_x_task.cpp b/be/src/pipeline/pipeline_x/pipeline_x_task.cpp index 4c0c0af1cd428b9..29bc70fce678b22 100644 --- a/be/src/pipeline/pipeline_x/pipeline_x_task.cpp +++ b/be/src/pipeline/pipeline_x/pipeline_x_task.cpp @@ -67,7 +67,8 @@ PipelineXTask::PipelineXTask(PipelinePtr& pipeline, uint32_t task_id, RuntimeSta pipeline->incr_created_tasks(); } -Status PipelineXTask::prepare(const TPipelineInstanceParams& local_params, const TDataSink& tsink) { +Status PipelineXTask::prepare(const TPipelineInstanceParams& local_params, const TDataSink& tsink, + QueryContext* query_ctx) { DCHECK(_sink); DCHECK(_cur_state == PipelineTaskState::NOT_READY) << get_state_name(_cur_state); _init_profile(); @@ -97,6 +98,8 @@ Status PipelineXTask::prepare(const TPipelineInstanceParams& local_params, const _le_state_map, _task_idx, _source_dependency[op->operator_id()]}; RETURN_IF_ERROR(op->setup_local_state(_state, info)); parent_profile = _state->get_local_state(op->operator_id())->profile(); + query_ctx->register_query_statistics( + _state->get_local_state(op->operator_id())->query_statistics_ptr()); } _block = doris::vectorized::Block::create_unique(); diff --git a/be/src/pipeline/pipeline_x/pipeline_x_task.h b/be/src/pipeline/pipeline_x/pipeline_x_task.h index f7b996f40a71e1f..c9e17727c707b5c 100644 --- a/be/src/pipeline/pipeline_x/pipeline_x_task.h +++ b/be/src/pipeline/pipeline_x/pipeline_x_task.h @@ -62,7 +62,8 @@ class PipelineXTask : public PipelineTask { return Status::InternalError("Should not reach here!"); } - Status prepare(const TPipelineInstanceParams& local_params, const TDataSink& tsink); + Status prepare(const TPipelineInstanceParams& local_params, const TDataSink& tsink, + QueryContext* query_ctx); Status execute(bool* eos) override; diff --git a/be/src/runtime/buffer_control_block.cpp b/be/src/runtime/buffer_control_block.cpp index 91cb032c765fa00..d746a6aca1007bc 100644 --- a/be/src/runtime/buffer_control_block.cpp +++ b/be/src/runtime/buffer_control_block.cpp @@ -91,7 +91,9 @@ BufferControlBlock::BufferControlBlock(const TUniqueId& id, int buffer_size) _is_cancelled(false), _buffer_rows(0), _buffer_limit(buffer_size), - _packet_num(0) {} + _packet_num(0) { + _query_statistics = std::make_unique(); +} BufferControlBlock::~BufferControlBlock() { static_cast(cancel()); diff --git a/be/src/runtime/buffer_control_block.h b/be/src/runtime/buffer_control_block.h index f75008f10166665..d84bcba68184688 100644 --- a/be/src/runtime/buffer_control_block.h +++ b/be/src/runtime/buffer_control_block.h @@ -91,11 +91,7 @@ class 
BufferControlBlock {
    [[nodiscard]] const TUniqueId& fragment_id() const { return _fragment_id; }

-    void set_query_statistics(std::shared_ptr statistics) {
-        _query_statistics = statistics;
-    }
-
-    void update_num_written_rows(int64_t num_rows) {
+    void update_return_rows(int64_t num_rows) {
        // _query_statistics may be null when the result sink init failed
        // or some other failure.
        // and the number of written rows is only needed when all things go well.
@@ -104,13 +100,6 @@ class BufferControlBlock {
-    void update_max_peak_memory_bytes() {
-        if (_query_statistics != nullptr) {
-            int64_t max_peak_memory_bytes = _query_statistics->calculate_max_peak_memory_bytes();
-            _query_statistics->set_max_peak_memory_bytes(max_peak_memory_bytes);
-        }
-    }
-
protected:
    virtual bool _get_batch_queue_empty() {
        return _fe_result_batch_queue.empty() && _arrow_flight_batch_queue.empty();
@@ -142,10 +131,8 @@ class BufferControlBlock {

    std::deque _waiting_rpc;

-    // It is shared with PlanFragmentExecutor and will be called in two different
-    // threads. But their calls are all at different time, there is no problem of
-    // multithreading access.
-    std::shared_ptr _query_statistics;
+    // only used by the FE to check LIMIT against the returned row count
+    std::unique_ptr _query_statistics;
};

class PipBufferControlBlock : public BufferControlBlock {
diff --git a/be/src/runtime/exec_env.h b/be/src/runtime/exec_env.h
index 5ac1bffdad371d5..edc765ce879d3f6 100644
--- a/be/src/runtime/exec_env.h
+++ b/be/src/runtime/exec_env.h
@@ -74,6 +74,7 @@ class MemTracker;
class StorageEngine;
class ResultBufferMgr;
class ResultQueueMgr;
+class RuntimeQueryStatiticsMgr;
class TMasterInfo;
class LoadChannelMgr;
class LoadStreamMgr;
@@ -153,6 +154,9 @@ class ExecEnv {
    pipeline::TaskScheduler* pipeline_task_group_scheduler() { return _with_group_task_scheduler; }
    taskgroup::TaskGroupManager* task_group_manager() { return _task_group_manager; }
    WorkloadSchedPolicyMgr* workload_sched_policy_mgr() { return _workload_sched_mgr; }
+    RuntimeQueryStatiticsMgr* runtime_query_statistics_mgr() {
+        return _runtime_query_statistics_mgr;
+    }

    // using template to simplify client cache management
    template
@@ -381,6 +385,8 @@ class ExecEnv {
    doris::pipeline::RuntimeFilterTimerQueue* _runtime_filter_timer_queue = nullptr;

    WorkloadSchedPolicyMgr* _workload_sched_mgr = nullptr;
+
+    RuntimeQueryStatiticsMgr* _runtime_query_statistics_mgr = nullptr;
};

template <>
diff --git a/be/src/runtime/exec_env_init.cpp b/be/src/runtime/exec_env_init.cpp
index dd54e6b10c263f8..9f1010083e47a6f 100644
--- a/be/src/runtime/exec_env_init.cpp
+++ b/be/src/runtime/exec_env_init.cpp
@@ -70,6 +70,7 @@
#include "runtime/result_buffer_mgr.h"
#include "runtime/result_queue_mgr.h"
#include "runtime/routine_load/routine_load_task_executor.h"
+#include "runtime/runtime_query_statistics_mgr.h"
#include "runtime/small_file_mgr.h"
#include "runtime/stream_load/new_load_stream_mgr.h"
#include "runtime/stream_load/stream_load_executor.h"
@@ -196,6 +197,9 @@ Status ExecEnv::_init(const std::vector& store_paths,
                              .set_max_queue_size(1000000)
                              .build(&_lazy_release_obj_pool));

+    // NOTE: the runtime query statistics mgr may be visited by query threads and daemon threads,
+    // so it must be created before any query begins and deleted only after all query and daemon threads have stopped
+    _runtime_query_statistics_mgr = new RuntimeQueryStatiticsMgr();
    init_file_cache_factory();
    RETURN_IF_ERROR(init_pipeline_task_scheduler());
    _task_group_manager = new taskgroup::TaskGroupManager();
@@ -257,7 +261,7 @@ Status ExecEnv::_init(const std::vector& store_paths,
    _storage_engine = new StorageEngine(options);
    auto st = _storage_engine->open();
    if (!st.ok()) {
-        LOG(ERROR) << "Lail to open StorageEngine, res=" << st;
+        LOG(ERROR) << "Fail to open StorageEngine, res=" << st;
        return st;
    }
    _storage_engine->set_heartbeat_flags(this->heartbeat_flags());
@@ -633,6 +637,10 @@ void ExecEnv::destroy() {
    // info is deconstructed then BE process will core at coordinator back method in fragment mgr.
    SAFE_DELETE(_master_info);

+    // NOTE: the runtime query statistics mgr may be visited by query threads and daemon threads,
+    // so it must be created before any query begins and deleted only after all query and daemon threads have stopped
+    SAFE_DELETE(_runtime_query_statistics_mgr);
+
    LOG(INFO) << "Doris exec envorinment is destoried.";
}

diff --git a/be/src/runtime/fragment_mgr.cpp b/be/src/runtime/fragment_mgr.cpp
index 8cf11047b62c094..d18eb2ef002a4d5 100644
--- a/be/src/runtime/fragment_mgr.cpp
+++ b/be/src/runtime/fragment_mgr.cpp
@@ -223,7 +223,6 @@ void FragmentMgr::coordinator_callback(const ReportStatusRequest& req) {
        if (req.query_statistics) {
            // use to report 'insert into select'
            TQueryStatistics queryStatistics;
-            DCHECK(req.query_statistics->collect_dml_statistics());
            req.query_statistics->to_thrift(&queryStatistics);
            params.__set_query_statistics(queryStatistics);
        }
diff --git a/be/src/runtime/group_commit_mgr.cpp b/be/src/runtime/group_commit_mgr.cpp
index c7333e21d645158..6d2ec020e7bd4ef 100644
--- a/be/src/runtime/group_commit_mgr.cpp
+++ b/be/src/runtime/group_commit_mgr.cpp
@@ -23,6 +23,7 @@
#include

#include "client_cache.h"
+#include "common/compiler_util.h"
#include "common/config.h"
#include "runtime/exec_env.h"
#include "runtime/fragment_mgr.h"
@@ -53,7 +54,7 @@ Status LoadBlockQueue::add_block(RuntimeState* runtime_state,
                    txn_id, label, load_instance_id.to_string());
        }
    }
-    if (runtime_state->is_cancelled()) {
+    if (UNLIKELY(runtime_state->is_cancelled())) {
        return Status::Cancelled(runtime_state->cancel_reason());
    }
    RETURN_IF_ERROR(status);
@@ -520,6 +521,7 @@ Status LoadBlockQueue::close_wal() {
}

bool LoadBlockQueue::has_enough_wal_disk_space(size_t pre_allocated) {
+    DBUG_EXECUTE_IF("LoadBlockQueue.has_enough_wal_disk_space.low_space", { return false; });
    auto* wal_mgr = ExecEnv::GetInstance()->wal_mgr();
    size_t available_bytes = 0;
    {
diff --git a/be/src/runtime/plan_fragment_executor.cpp b/be/src/runtime/plan_fragment_executor.cpp
index 896178946e3a68d..9c40e2e9f34d8cf 100644
--- a/be/src/runtime/plan_fragment_executor.cpp
+++ b/be/src/runtime/plan_fragment_executor.cpp
@@ -92,10 +92,11 @@ PlanFragmentExecutor::PlanFragmentExecutor(ExecEnv* exec_env,
          _closed(false),
          _is_report_success(false),
          _is_report_on_cancel(true),
-          _collect_query_statistics_with_every_batch(false),
          _cancel_reason(PPlanFragmentCancelReason::INTERNAL_ERROR) {
    _report_thread_future = _report_thread_promise.get_future();
    _start_time = VecDateTimeValue::local_time();
+    _query_statistics = std::make_shared();
+    _query_ctx->register_query_statistics(_query_statistics);
}

PlanFragmentExecutor::~PlanFragmentExecutor() {
@@ -231,11 +232,6 @@ Status PlanFragmentExecutor::prepare(const TExecPlanFragmentParams& request) {
        if (sink_profile != nullptr) {
            profile()->add_child(sink_profile, true, nullptr);
        }
-
-        _collect_query_statistics_with_every_batch =
-                params.__isset.send_query_statistics_with_every_batch
-                        ?
params.send_query_statistics_with_every_batch - : false; } else { // _sink is set to nullptr _sink.reset(nullptr); @@ -254,11 +250,6 @@ Status PlanFragmentExecutor::prepare(const TExecPlanFragmentParams& request) { VLOG_NOTICE << "plan_root=\n" << _plan->debug_string(); _prepared = true; - - _query_statistics.reset(new QueryStatistics(request.query_options.query_type)); - if (_sink != nullptr) { - _sink->set_query_statistics(_query_statistics); - } return Status::OK(); } @@ -336,10 +327,7 @@ Status PlanFragmentExecutor::open_vectorized_internal() { st = get_vectorized_internal(block.get(), &eos); RETURN_IF_ERROR(st); - // Collect this plan and sub plan statistics, and send to parent plan. - if (_collect_query_statistics_with_every_batch) { - _collect_query_statistics(); - } + _query_statistics->add_cpu_ms(_fragment_cpu_timer->value() / NANOS_PER_MILLIS); if (!eos || block->rows() > 0) { st = _sink->send(runtime_state(), block.get()); @@ -351,7 +339,6 @@ Status PlanFragmentExecutor::open_vectorized_internal() { } } { - _collect_query_statistics(); Status status; { std::lock_guard l(_status_lock); @@ -430,44 +417,6 @@ bool PlanFragmentExecutor::is_timeout(const VecDateTimeValue& now) const { return false; } -void PlanFragmentExecutor::_collect_query_statistics() { - _query_statistics->clear(); - Status status; - /// TODO(yxc): - // The judgment of enable_local_exchange here is a bug, it should not need to be checked. I will fix this later. - bool _is_local = false; - if (_runtime_state->query_options().__isset.enable_local_exchange) { - _is_local = _runtime_state->query_options().enable_local_exchange; - } - - if (_is_local) { - if (_runtime_state->num_per_fragment_instances() == 1) { - status = _plan->collect_query_statistics(_query_statistics.get()); - } else { - status = _plan->collect_query_statistics(_query_statistics.get(), - _runtime_state->per_fragment_instance_idx()); - } - } else { - status = _plan->collect_query_statistics(_query_statistics.get()); - } - - if (!status.ok()) { - LOG(INFO) << "collect query statistics failed, st=" << status; - return; - } - _query_statistics->add_cpu_ms(_fragment_cpu_timer->value() / NANOS_PER_MILLIS); - if (_runtime_state->backend_id() != -1) { - _collect_node_statistics(); - } -} - -void PlanFragmentExecutor::_collect_node_statistics() { - DCHECK(_runtime_state->backend_id() != -1); - NodeStatistics* node_statistics = - _query_statistics->add_nodes_statistics(_runtime_state->backend_id()); - node_statistics->set_peak_memory(_runtime_state->query_mem_tracker()->peak_consumption()); -} - void PlanFragmentExecutor::report_profile() { SCOPED_ATTACH_TASK(_runtime_state.get()); VLOG_FILE << "report_profile(): instance_id=" << _runtime_state->fragment_instance_id(); @@ -559,7 +508,7 @@ void PlanFragmentExecutor::send_report(bool done) { std::bind(&PlanFragmentExecutor::update_status, this, std::placeholders::_1), std::bind(&PlanFragmentExecutor::cancel, this, std::placeholders::_1, std::placeholders::_2), - _dml_query_statistics()}; + _query_ctx->get_query_statistics()}; // This will send a report even if we are cancelled. If the query completed correctly // but fragments still need to be cancelled (e.g. limit reached), the coordinator will // be waiting for a final report and profile. 
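To summarize the new statistics path shown above: each executor owns a QueryStatistics object, registers it with its QueryContext (which forwards it to the process-wide RuntimeQueryStatiticsMgr under the query id), updates only its local object during execution, and lets the report path pull the merged view back from the manager. A minimal sketch using the helpers added in this patch; `query_ctx` and `fragment_cpu_time_ns` are illustrative placeholders:

    // Register once, e.g. in the executor's constructor (as done above).
    auto qs = std::make_shared<QueryStatistics>();
    query_ctx->register_query_statistics(qs);

    // During execution only the local object is touched.
    qs->add_cpu_ms(fragment_cpu_time_ns / NANOS_PER_MILLIS);

    // The report path fetches the merged statistics for the whole query
    // back from RuntimeQueryStatiticsMgr through the query context.
    std::shared_ptr<QueryStatistics> merged = query_ctx->get_query_statistics();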
diff --git a/be/src/runtime/plan_fragment_executor.h b/be/src/runtime/plan_fragment_executor.h index 41817e5308d1b57..051448f13fa378b 100644 --- a/be/src/runtime/plan_fragment_executor.h +++ b/be/src/runtime/plan_fragment_executor.h @@ -47,7 +47,6 @@ class DataSink; class DescriptorTbl; class ExecEnv; class ObjectPool; -class QueryStatistics; struct ReportStatusRequest; namespace vectorized { @@ -231,12 +230,6 @@ class PlanFragmentExecutor : public TaskExecutionContext { VecDateTimeValue _start_time; - // It is shared with BufferControlBlock and will be called in two different - // threads. But their calls are all at different time, there is no problem of - // multithreaded access. - std::shared_ptr _query_statistics; - bool _collect_query_statistics_with_every_batch; - // Record the cancel information when calling the cancel() method, return it to FE PPlanFragmentCancelReason _cancel_reason; std::string _cancel_msg; @@ -275,16 +268,9 @@ class PlanFragmentExecutor : public TaskExecutionContext { const DescriptorTbl& desc_tbl() const { return _runtime_state->desc_tbl(); } - void _collect_query_statistics(); - - std::shared_ptr _dml_query_statistics() { - if (_query_statistics && _query_statistics->collect_dml_statistics()) { - return _query_statistics; - } - return nullptr; - } - void _collect_node_statistics(); + + std::shared_ptr _query_statistics = nullptr; }; } // namespace doris diff --git a/be/src/runtime/query_context.cpp b/be/src/runtime/query_context.cpp index e60f29b291551ce..bd7dee33b541633 100644 --- a/be/src/runtime/query_context.cpp +++ b/be/src/runtime/query_context.cpp @@ -19,6 +19,7 @@ #include "pipeline/pipeline_fragment_context.h" #include "pipeline/pipeline_x/dependency.h" +#include "runtime/runtime_query_statistics_mgr.h" namespace doris { @@ -73,6 +74,7 @@ QueryContext::~QueryContext() { static_cast(ExecEnv::GetInstance()->lazy_release_obj_pool()->submit( std::make_shared(std::move(_thread_token)))); } + _exec_env->runtime_query_statistics_mgr()->set_query_finished(print_id(_query_id)); } void QueryContext::set_ready_to_execute(bool is_cancelled) { @@ -118,4 +120,15 @@ bool QueryContext::cancel(bool v, std::string msg, Status new_status, int fragme } return true; } + +void QueryContext::register_query_statistics(std::shared_ptr qs) { + _exec_env->runtime_query_statistics_mgr()->register_query_statistics(print_id(_query_id), qs, + coord_addr); +} + +std::shared_ptr QueryContext::get_query_statistics() { + return _exec_env->runtime_query_statistics_mgr()->get_runtime_query_statistics( + print_id(_query_id)); +} + } // namespace doris diff --git a/be/src/runtime/query_context.h b/be/src/runtime/query_context.h index 203c5b6e3f4ca06..9e906cbbe192ab5 100644 --- a/be/src/runtime/query_context.h +++ b/be/src/runtime/query_context.h @@ -208,6 +208,10 @@ class QueryContext { pipeline::Dependency* get_execution_dependency() { return _execution_dependency.get(); } + void register_query_statistics(std::shared_ptr qs); + + std::shared_ptr get_query_statistics(); + public: DescriptorTbl* desc_tbl = nullptr; bool set_rsc_info = false; diff --git a/be/src/runtime/query_statistics.cpp b/be/src/runtime/query_statistics.cpp index 02789e2dabe9637..7171803ce03a307 100644 --- a/be/src/runtime/query_statistics.cpp +++ b/be/src/runtime/query_statistics.cpp @@ -64,11 +64,11 @@ void QueryStatistics::to_pb(PQueryStatistics* statistics) { void QueryStatistics::to_thrift(TQueryStatistics* statistics) const { DCHECK(statistics != nullptr); - statistics->scan_bytes = scan_bytes; - 
statistics->scan_rows = scan_rows; - statistics->cpu_ms = cpu_ms; - statistics->returned_rows = returned_rows; - statistics->max_peak_memory_bytes = max_peak_memory_bytes; + statistics->__set_scan_bytes(scan_bytes); + statistics->__set_scan_rows(scan_rows); + statistics->__set_cpu_ms(cpu_ms); + statistics->__set_returned_rows(returned_rows); + statistics->__set_max_peak_memory_bytes(max_peak_memory_bytes); } void QueryStatistics::from_pb(const PQueryStatistics& statistics) { diff --git a/be/src/runtime/query_statistics.h b/be/src/runtime/query_statistics.h index fa39c9ea183e254..8c4662ba59d16e0 100644 --- a/be/src/runtime/query_statistics.h +++ b/be/src/runtime/query_statistics.h @@ -59,13 +59,8 @@ class NodeStatistics { // or plan's statistics and QueryStatisticsRecvr is responsible for collecting it. class QueryStatistics { public: - QueryStatistics(TQueryType::type query_type = TQueryType::type::SELECT) - : scan_rows(0), - scan_bytes(0), - cpu_ms(0), - returned_rows(0), - max_peak_memory_bytes(0), - _query_type(query_type) {} + QueryStatistics() + : scan_rows(0), scan_bytes(0), cpu_ms(0), returned_rows(0), max_peak_memory_bytes(0) {} virtual ~QueryStatistics(); void merge(const QueryStatistics& other); @@ -103,8 +98,9 @@ class QueryStatistics { void clearNodeStatistics(); void clear() { - scan_rows = 0; - scan_bytes = 0; + scan_rows.store(0); + scan_bytes.store(0); + cpu_ms = 0; returned_rows = 0; max_peak_memory_bytes = 0; @@ -119,13 +115,13 @@ class QueryStatistics { bool collected() const { return _collected; } void set_collected() { _collected = true; } - // LOAD does not need to collect information on the exchange node. - bool collect_dml_statistics() { return _query_type == TQueryType::LOAD; } + int64_t get_scan_rows() { return scan_rows.load(); } + int64_t get_scan_bytes() { return scan_bytes.load(); } private: friend class QueryStatisticsRecvr; - int64_t scan_rows; - int64_t scan_bytes; + std::atomic scan_rows; + std::atomic scan_bytes; int64_t cpu_ms; // number rows returned by query. // only set once by result sink when closing. @@ -137,7 +133,6 @@ class QueryStatistics { using NodeStatisticsMap = std::unordered_map; NodeStatisticsMap _nodes_statistics_map; bool _collected = false; - const TQueryType::type _query_type; }; using QueryStatisticsPtr = std::shared_ptr; // It is used for collecting sub plan query statistics in DataStreamRecvr. diff --git a/be/src/runtime/runtime_query_statistics_mgr.cpp b/be/src/runtime/runtime_query_statistics_mgr.cpp new file mode 100644 index 000000000000000..6a8aa3f5097ccac --- /dev/null +++ b/be/src/runtime/runtime_query_statistics_mgr.cpp @@ -0,0 +1,148 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +#include "runtime/runtime_query_statistics_mgr.h" + +#include "runtime/client_cache.h" +#include "runtime/exec_env.h" +#include "util/debug_util.h" + +namespace doris { + +void RuntimeQueryStatiticsMgr::register_query_statistics(std::string query_id, + std::shared_ptr qs_ptr, + TNetworkAddress fe_addr) { + std::lock_guard write_lock(_qs_ctx_map_lock); + if (_query_statistics_ctx_map.find(query_id) == _query_statistics_ctx_map.end()) { + _query_statistics_ctx_map[query_id] = std::make_unique(fe_addr); + } + _query_statistics_ctx_map.at(query_id)->qs_list.push_back(qs_ptr); +} + +void RuntimeQueryStatiticsMgr::report_runtime_query_statistics() { + int64_t be_id = ExecEnv::GetInstance()->master_info()->backend_id; + // 1 get query statistics map + std::map> fe_qs_map; + std::map query_finished; + { + std::lock_guard write_lock(_qs_ctx_map_lock); + for (auto& [query_id, qs_ctx_ptr] : _query_statistics_ctx_map) { + if (fe_qs_map.find(qs_ctx_ptr->fe_addr) == fe_qs_map.end()) { + std::map tmp_map; + fe_qs_map[qs_ctx_ptr->fe_addr] = std::move(tmp_map); + } + + QueryStatistics tmp_qs; + for (auto& qs_ptr : qs_ctx_ptr->qs_list) { + tmp_qs.merge(*qs_ptr); + } + TQueryStatistics ret_t_qs; + tmp_qs.to_thrift(&ret_t_qs); + fe_qs_map.at(qs_ctx_ptr->fe_addr)[query_id] = ret_t_qs; + query_finished[query_id] = qs_ctx_ptr->is_query_finished; + } + } + + // 2 report query statistics to fe + std::map rpc_result; + for (auto& [addr, qs_map] : fe_qs_map) { + rpc_result[addr] = false; + // 2.1 get client + Status coord_status; + FrontendServiceConnection coord(ExecEnv::GetInstance()->frontend_client_cache(), addr, + &coord_status); + std::string add_str = PrintThriftNetworkAddress(addr); + if (!coord_status.ok()) { + std::stringstream ss; + LOG(WARNING) << "could not get client " << add_str + << " when report workload runtime stats, reason is " + << coord_status.to_string(); + continue; + } + + // 2.2 send report + TReportWorkloadRuntimeStatusParams report_runtime_params; + report_runtime_params.__set_backend_id(be_id); + report_runtime_params.__set_query_statistics_map(qs_map); + + TReportExecStatusParams params; + params.report_workload_runtime_status = report_runtime_params; + + TReportExecStatusResult res; + Status rpc_status; + try { + coord->reportExecStatus(res, params); + rpc_result[addr] = true; + } catch (apache::thrift::transport::TTransportException& e) { + LOG(WARNING) << "report workload runtime stats to " << add_str + << " failed, err: " << e.what(); + rpc_status = coord.reopen(); + if (!rpc_status.ok()) { + LOG(WARNING) + << "reopen thrift client failed when report workload runtime statistics to" + << add_str; + } else { + try { + coord->reportExecStatus(res, params); + rpc_result[addr] = true; + } catch (apache::thrift::transport::TTransportException& e2) { + LOG(WARNING) << "retry report workload runtime stats to " << add_str + << " failed, err: " << e2.what(); + } + } + } + } + + // 3 when query is finished and (last rpc is send success), remove finished query statistics + { + std::lock_guard write_lock(_qs_ctx_map_lock); + for (auto& [addr, qs_map] : fe_qs_map) { + if (rpc_result[addr]) { + for (auto& [query_id, qs] : qs_map) { + if (query_finished[query_id]) { + _query_statistics_ctx_map.erase(query_id); + } + } + } + } + } +} + +void RuntimeQueryStatiticsMgr::set_query_finished(std::string query_id) { + // NOTE: here must be a write lock + std::lock_guard write_lock(_qs_ctx_map_lock); + // when a query get query_ctx succ, but failed before create node/operator, + // it may not register 
query statistics, so it can not be mark finish + if (_query_statistics_ctx_map.find(query_id) != _query_statistics_ctx_map.end()) { + _query_statistics_ctx_map.at(query_id)->is_query_finished = true; + } +} + +std::shared_ptr RuntimeQueryStatiticsMgr::get_runtime_query_statistics( + std::string query_id) { + std::shared_lock read_lock(_qs_ctx_map_lock); + if (_query_statistics_ctx_map.find(query_id) == _query_statistics_ctx_map.end()) { + return nullptr; + } + std::shared_ptr qs_ptr = std::make_shared(); + for (auto const& qs : _query_statistics_ctx_map[query_id]->qs_list) { + qs_ptr->merge(*qs); + } + return qs_ptr; +} + +} // namespace doris \ No newline at end of file diff --git a/be/src/runtime/runtime_query_statistics_mgr.h b/be/src/runtime/runtime_query_statistics_mgr.h new file mode 100644 index 000000000000000..b3fa4bbc408b248 --- /dev/null +++ b/be/src/runtime/runtime_query_statistics_mgr.h @@ -0,0 +1,58 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#pragma once + +#include +#include + +#include "runtime/query_statistics.h" + +namespace doris { + +class QueryStatisticsCtx { +public: + QueryStatisticsCtx(TNetworkAddress fe_addr) : fe_addr(fe_addr) { + this->is_query_finished = false; + } + ~QueryStatisticsCtx() = default; + + std::vector> qs_list; + std::atomic is_query_finished; + TNetworkAddress fe_addr; +}; + +class RuntimeQueryStatiticsMgr { +public: + RuntimeQueryStatiticsMgr() = default; + ~RuntimeQueryStatiticsMgr() = default; + + void register_query_statistics(std::string query_id, std::shared_ptr qs_ptr, + TNetworkAddress fe_addr); + + void report_runtime_query_statistics(); + + void set_query_finished(std::string query_id); + + std::shared_ptr get_runtime_query_statistics(std::string query_id); + +private: + std::shared_mutex _qs_ctx_map_lock; + std::map> _query_statistics_ctx_map; +}; + +} // namespace doris \ No newline at end of file diff --git a/be/src/service/internal_service.cpp b/be/src/service/internal_service.cpp index 18a8325e4cb081d..e7255f017b7b593 100644 --- a/be/src/service/internal_service.cpp +++ b/be/src/service/internal_service.cpp @@ -391,7 +391,9 @@ void PInternalServiceImpl::open_load_stream(google::protobuf::RpcController* con } stream_options.handler = load_stream.get(); - stream_options.idle_timeout_ms = config::load_stream_idle_timeout_ms; + stream_options.idle_timeout_ms = request->idle_timeout_ms(); + DBUG_EXECUTE_IF("PInternalServiceImpl.open_load_stream.set_idle_timeout", + { stream_options.idle_timeout_ms = 1; }); StreamId streamid; if (brpc::StreamAccept(&streamid, *cntl, &stream_options) != 0) { diff --git a/be/src/util/simd/vstring_function.h b/be/src/util/simd/vstring_function.h index 306a18a6d66ec7b..dac964b1b94224b 100644 --- a/be/src/util/simd/vstring_function.h +++ 
b/be/src/util/simd/vstring_function.h @@ -44,6 +44,54 @@ inline uint8_t get_utf8_byte_length(uint8_t character) { return UTF8_BYTE_LENGTH[character]; } +// copy from https://github.com/lemire/fastvalidate-utf-8/blob/master/include/simdasciicheck.h +// The function returns true (1) if all chars passed in src are +// 7-bit values (0x00..0x7F). Otherwise, it returns false (0). +inline bool validate_ascii_fast(const char* src, size_t len) { + size_t i = 0; + __m128i has_error = _mm_setzero_si128(); + if (len >= 16) { + for (; i <= len - 16; i += 16) { + __m128i current_bytes = _mm_loadu_si128((const __m128i*)(src + i)); + has_error = _mm_or_si128(has_error, current_bytes); + } + } + int error_mask = _mm_movemask_epi8(has_error); + + char tail_has_error = 0; + for (; i < len; i++) { + tail_has_error |= src[i]; + } + error_mask |= (tail_has_error & 0x80); + + return !error_mask; +} + +#ifdef __AVX2__ +#include +// The function returns true (1) if all chars passed in src are +// 7-bit values (0x00..0x7F). Otherwise, it returns false (0). +inline bool validate_ascii_fast_avx(const char* src, size_t len) { + size_t i = 0; + __m256i has_error = _mm256_setzero_si256(); + if (len >= 32) { + for (; i <= len - 32; i += 32) { + __m256i current_bytes = _mm256_loadu_si256((const __m256i*)(src + i)); + has_error = _mm256_or_si256(has_error, current_bytes); + } + } + int error_mask = _mm256_movemask_epi8(has_error); + + char tail_has_error = 0; + for (; i < len; i++) { + tail_has_error |= src[i]; + } + error_mask |= (tail_has_error & 0x80); + + return !error_mask; +} +#endif + namespace simd { class VStringFunctions { @@ -219,11 +267,10 @@ class VStringFunctions { // Gcc will do auto simd in this function static bool is_ascii(const StringRef& str) { - char or_code = 0; - for (size_t i = 0; i < str.size; i++) { - or_code |= str.data[i]; - } - return !(or_code & 0x80); +#ifdef __AVX2__ + return validate_ascii_fast_avx(str.data, str.size); +#endif + return validate_ascii_fast(str.data, str.size); } static void reverse(const StringRef& str, StringRef dst) { diff --git a/be/src/vec/aggregate_functions/aggregate_function_null.h b/be/src/vec/aggregate_functions/aggregate_function_null.h index becb06f7cfca64d..939396073825c49 100644 --- a/be/src/vec/aggregate_functions/aggregate_function_null.h +++ b/be/src/vec/aggregate_functions/aggregate_function_null.h @@ -210,17 +210,29 @@ class AggregateFunctionNullUnaryInline final } } - void add_batch(size_t batch_size, AggregateDataPtr* places, size_t place_offset, + void add_batch(size_t batch_size, AggregateDataPtr* __restrict places, size_t place_offset, const IColumn** columns, Arena* arena, bool agg_many) const override { - const ColumnNullable* column = assert_cast(columns[0]); - // The overhead introduced is negligible here, just an extra memory read from NullMap - const auto* __restrict null_map_data = column->get_null_map_data().data(); + const auto* column = assert_cast(columns[0]); const IColumn* nested_column = &column->get_nested_column(); - for (int i = 0; i < batch_size; ++i) { - if (!null_map_data[i]) { - AggregateDataPtr __restrict place = places[i] + place_offset; - this->set_flag(place); - this->nested_function->add(this->nested_place(place), &nested_column, i, arena); + if (column->has_null()) { + const auto* __restrict null_map_data = column->get_null_map_data().data(); + for (int i = 0; i < batch_size; ++i) { + if (!null_map_data[i]) { + AggregateDataPtr __restrict place = places[i] + place_offset; + this->set_flag(place); + 
this->nested_function->add(this->nested_place(place), &nested_column, i, arena); + } + } + } else { + if constexpr (result_is_nullable) { + for (int i = 0; i < batch_size; ++i) { + AggregateDataPtr __restrict place = places[i] + place_offset; + place[0] |= 1; + this->nested_function->add(this->nested_place(place), &nested_column, i, arena); + } + } else { + this->nested_function->add_batch(batch_size, places, place_offset, &nested_column, + arena, agg_many); } } } diff --git a/be/src/vec/columns/column.h b/be/src/vec/columns/column.h index d67ad3e206b6951..23f9073eff5e532 100644 --- a/be/src/vec/columns/column.h +++ b/be/src/vec/columns/column.h @@ -603,9 +603,6 @@ class IColumn : public COW { // true if column has null element [0,size) virtual bool has_null(size_t size) const { return false; } - /// It's a special kind of column, that contain single value, but is not a ColumnConst. - virtual bool is_dummy() const { return false; } - virtual bool is_exclusive() const { return use_count() == 1; } /// Clear data of column, just like vector clear @@ -733,7 +730,7 @@ using ColumnPtr = IColumn::Ptr; using MutableColumnPtr = IColumn::MutablePtr; using Columns = std::vector; using MutableColumns = std::vector; - +using ColumnPtrs = std::vector; using ColumnRawPtrs = std::vector; template diff --git a/be/src/vec/columns/column_array.cpp b/be/src/vec/columns/column_array.cpp index 866a55c447f12a2..8d1cbdd69acc1dd 100644 --- a/be/src/vec/columns/column_array.cpp +++ b/be/src/vec/columns/column_array.cpp @@ -395,10 +395,17 @@ void ColumnArray::update_crcs_with_value(uint32_t* __restrict hash, PrimitiveTyp } void ColumnArray::insert(const Field& x) { - const Array& array = doris::vectorized::get(x); - size_t size = array.size(); - for (size_t i = 0; i < size; ++i) get_data().insert(array[i]); - get_offsets().push_back(get_offsets().back() + size); + if (x.is_null()) { + get_data().insert(Null()); + get_offsets().push_back(get_offsets().back() + 1); + } else { + const auto& array = doris::vectorized::get(x); + size_t size = array.size(); + for (size_t i = 0; i < size; ++i) { + get_data().insert(array[i]); + } + get_offsets().push_back(get_offsets().back() + size); + } } void ColumnArray::insert_from(const IColumn& src_, size_t n) { diff --git a/be/src/vec/columns/column_const.h b/be/src/vec/columns/column_const.h index 8d03087cc3d9890..f36f0e81879107c 100644 --- a/be/src/vec/columns/column_const.h +++ b/be/src/vec/columns/column_const.h @@ -73,7 +73,9 @@ class ColumnConst final : public COWHelper { public: ColumnPtr convert_to_full_column() const; - ColumnPtr convert_to_full_column_if_const() const override { return convert_to_full_column(); } + ColumnPtr convert_to_full_column_if_const() const override { + return convert_to_full_column()->convert_to_full_column_if_const(); + } ColumnPtr remove_low_cardinality() const; diff --git a/be/src/vec/columns/column_decimal.cpp b/be/src/vec/columns/column_decimal.cpp index ed76ce6a1133080..dd42b3563a827da 100644 --- a/be/src/vec/columns/column_decimal.cpp +++ b/be/src/vec/columns/column_decimal.cpp @@ -79,7 +79,7 @@ template void ColumnDecimal::serialize_vec(std::vector& keys, size_t num_rows, size_t max_row_byte_size) const { for (size_t i = 0; i < num_rows; ++i) { - memcpy(const_cast(keys[i].data + keys[i].size), &data[i], sizeof(T)); + memcpy_fixed(const_cast(keys[i].data + keys[i].size), (char*)&data[i]); keys[i].size += sizeof(T); } } @@ -89,7 +89,7 @@ void ColumnDecimal::serialize_vec_with_null_map(std::vector& keys, const uint8_t* null_map) const { for 
(size_t i = 0; i < num_rows; ++i) { if (null_map[i] == 0) { - memcpy(const_cast(keys[i].data + keys[i].size), &data[i], sizeof(T)); + memcpy_fixed(const_cast(keys[i].data + keys[i].size), (char*)&data[i]); keys[i].size += sizeof(T); } } diff --git a/be/src/vec/columns/column_dummy.h b/be/src/vec/columns/column_dummy.h index f1f7dac52d58891..b51cd8faa659ebe 100644 --- a/be/src/vec/columns/column_dummy.h +++ b/be/src/vec/columns/column_dummy.h @@ -150,8 +150,6 @@ class IColumnDummy : public IColumn { void addSize(size_t delta) { s += delta; } - bool is_dummy() const override { return true; } - void replace_column_data(const IColumn& rhs, size_t row, size_t self_row = 0) override { LOG(FATAL) << "should not call the method in column dummy"; } diff --git a/be/src/vec/columns/column_map.cpp b/be/src/vec/columns/column_map.cpp index e46ea7fe683128b..53d23df8ae3d716 100644 --- a/be/src/vec/columns/column_map.cpp +++ b/be/src/vec/columns/column_map.cpp @@ -520,4 +520,10 @@ size_t ColumnMap::allocated_bytes() const { get_offsets().allocated_bytes(); } +ColumnPtr ColumnMap::convert_to_full_column_if_const() const { + return ColumnMap::create(keys_column->convert_to_full_column_if_const(), + values_column->convert_to_full_column_if_const(), + offsets_column->convert_to_full_column_if_const()); +} + } // namespace doris::vectorized diff --git a/be/src/vec/columns/column_map.h b/be/src/vec/columns/column_map.h index 45cedeb0d94e69c..fe1ccfb6f82e9e3 100644 --- a/be/src/vec/columns/column_map.h +++ b/be/src/vec/columns/column_map.h @@ -91,6 +91,8 @@ class ColumnMap final : public COWHelper { offsets_column->clear(); } + ColumnPtr convert_to_full_column_if_const() const override; + MutableColumnPtr clone_resized(size_t size) const override; Field operator[](size_t n) const override; diff --git a/be/src/vec/columns/column_nullable.cpp b/be/src/vec/columns/column_nullable.cpp index ecf330bead3ca69..426de2d4f70eee1 100644 --- a/be/src/vec/columns/column_nullable.cpp +++ b/be/src/vec/columns/column_nullable.cpp @@ -257,15 +257,22 @@ size_t ColumnNullable::get_max_row_byte_size() const { void ColumnNullable::serialize_vec(std::vector& keys, size_t num_rows, size_t max_row_byte_size) const { - const auto& arr = get_null_map_data(); - static constexpr auto s = sizeof(arr[0]); - for (size_t i = 0; i < num_rows; ++i) { - auto* val = const_cast(keys[i].data + keys[i].size); - *val = (arr[i] ? 1 : 0); - keys[i].size += s; + if (has_null()) { + const auto& arr = get_null_map_data(); + for (size_t i = 0; i < num_rows; ++i) { + auto* val = const_cast(keys[i].data + keys[i].size); + *val = (arr[i] ? 
1 : 0); + keys[i].size++; + } + get_nested_column().serialize_vec_with_null_map(keys, num_rows, arr.data()); + } else { + for (size_t i = 0; i < num_rows; ++i) { + auto* val = const_cast(keys[i].data + keys[i].size); + *val = 0; + keys[i].size++; + } + get_nested_column().serialize_vec(keys, num_rows, max_row_byte_size); } - - get_nested_column().serialize_vec_with_null_map(keys, num_rows, arr.data()); } void ColumnNullable::deserialize_vec(std::vector& keys, const size_t num_rows) { @@ -282,7 +289,11 @@ void ColumnNullable::deserialize_vec(std::vector& keys, const size_t keys[i].data += sizeof(val); keys[i].size -= sizeof(val); } - get_nested_column().deserialize_vec_with_null_map(keys, num_rows, arr.data()); + if (_has_null) { + get_nested_column().deserialize_vec_with_null_map(keys, num_rows, arr.data()); + } else { + get_nested_column().deserialize_vec(keys, num_rows); + } } void ColumnNullable::insert_range_from(const IColumn& src, size_t start, size_t length) { diff --git a/be/src/vec/columns/column_nullable.h b/be/src/vec/columns/column_nullable.h index 10b0951ab8b96af..83cbe82e328fd91 100644 --- a/be/src/vec/columns/column_nullable.h +++ b/be/src/vec/columns/column_nullable.h @@ -274,7 +274,8 @@ class ColumnNullable final : public COWHelper { size_t size_of_value_if_fixed() const override { return null_map->size_of_value_if_fixed() + nested_column->size_of_value_if_fixed(); } - bool only_null() const override { return nested_column->is_dummy(); } + + bool only_null() const override { return size() == 1 && is_null_at(0); } // used in schema change void change_nested_column(ColumnPtr& other) { ((ColumnPtr&)nested_column) = other; } diff --git a/be/src/vec/columns/column_object.cpp b/be/src/vec/columns/column_object.cpp index 77366d05cb58b24..aff38c56a80bb0f 100644 --- a/be/src/vec/columns/column_object.cpp +++ b/be/src/vec/columns/column_object.cpp @@ -488,7 +488,7 @@ void ColumnObject::Subcolumn::finalize() { throw doris::Exception(ErrorCode::INVALID_ARGUMENT, st.to_string() + ", real_code:{}", st.code()); } - part = ptr; + part = ptr->convert_to_full_column_if_const(); } result_column->insert_range_from(*part, 0, part_size); } diff --git a/be/src/vec/columns/column_string.cpp b/be/src/vec/columns/column_string.cpp index 424a8717e1498c8..337f5e5663a2425 100644 --- a/be/src/vec/columns/column_string.cpp +++ b/be/src/vec/columns/column_string.cpp @@ -313,7 +313,7 @@ void ColumnString::serialize_vec(std::vector& keys, size_t num_rows, uint32_t string_size(size_at(i)); auto* ptr = const_cast(keys[i].data + keys[i].size); - memcpy(ptr, &string_size, sizeof(string_size)); + memcpy_fixed(ptr, (char*)&string_size); memcpy(ptr + sizeof(string_size), &chars[offset], string_size); keys[i].size += sizeof(string_size) + string_size; } @@ -327,7 +327,7 @@ void ColumnString::serialize_vec_with_null_map(std::vector& keys, siz uint32_t string_size(size_at(i)); auto* ptr = const_cast(keys[i].data + keys[i].size); - memcpy(ptr, &string_size, sizeof(string_size)); + memcpy_fixed(ptr, (char*)&string_size); memcpy(ptr + sizeof(string_size), &chars[offset], string_size); keys[i].size += sizeof(string_size) + string_size; } diff --git a/be/src/vec/common/hash_table/hash_map_context.h b/be/src/vec/common/hash_table/hash_map_context.h index 0da222d37216dc2..3784379902bfdf0 100644 --- a/be/src/vec/common/hash_table/hash_map_context.h +++ b/be/src/vec/common/hash_table/hash_map_context.h @@ -377,12 +377,14 @@ struct MethodKeysFixed : public MethodBase { assert_cast(*nullmap_columns[j]).get_data().data(); for 
(size_t i = 0; i < row_numbers; ++i) { // make sure null cell is filled by 0x0 - memcpy_fixed((char*)(&result[i]) + offset, - nullmap[i] ? (char*)&zero : data + i * sizeof(Fixed)); + memcpy_fixed( + (char*)(&result[i]) + offset, + nullmap[i] ? (char*)&zero : data + i * sizeof(Fixed)); } } else { for (size_t i = 0; i < row_numbers; ++i) { - memcpy_fixed((char*)(&result[i]) + offset, data + i * sizeof(Fixed)); + memcpy_fixed((char*)(&result[i]) + offset, + data + i * sizeof(Fixed)); } } }; @@ -474,7 +476,8 @@ struct MethodKeysFixed : public MethodBase { auto foo = [&](Fixed zero) { CHECK_EQ(sizeof(Fixed), size); for (size_t j = 0; j < num_rows; j++) { - memcpy_fixed(data + j * sizeof(Fixed), (char*)(&input_keys[j]) + pos); + memcpy_fixed(data + j * sizeof(Fixed), + (char*)(&input_keys[j]) + pos); } }; diff --git a/be/src/vec/common/memcpy_small.h b/be/src/vec/common/memcpy_small.h index af5d0e6074d8bb7..473900663186e6b 100644 --- a/be/src/vec/common/memcpy_small.h +++ b/be/src/vec/common/memcpy_small.h @@ -82,7 +82,12 @@ inline void memcpy_small_allow_read_write_overflow15(void* __restrict dst, #endif -template +// assume input address not aligned by default +template void memcpy_fixed(char* lhs, const char* rhs) { - *(T*)lhs = *(T*)rhs; + if constexpr (aligned || sizeof(T) <= 8) { + *(T*)lhs = *(T*)rhs; + } else { + memcpy(lhs, rhs, sizeof(T)); + } } diff --git a/be/src/vec/common/pod_array.h b/be/src/vec/common/pod_array.h index 91642aebd470b02..0d7cde6503de10e 100644 --- a/be/src/vec/common/pod_array.h +++ b/be/src/vec/common/pod_array.h @@ -480,6 +480,14 @@ class PODArray : public PODArrayBasec_end += bytes_to_copy; } + template + void insert_assume_reserved_and_allow_overflow(It1 from_begin, It2 from_end) { + size_t bytes_to_copy = this->byte_size(from_end - from_begin); + memcpy_small_allow_read_write_overflow15( + this->c_end, reinterpret_cast(&*from_begin), bytes_to_copy); + this->c_end += bytes_to_copy; + } + void swap(PODArray& rhs) { #ifndef NDEBUG this->unprotect(); diff --git a/be/src/vec/common/schema_util.cpp b/be/src/vec/common/schema_util.cpp index 1290ddb237f09e9..0ceddf25b3feee0 100644 --- a/be/src/vec/common/schema_util.cpp +++ b/be/src/vec/common/schema_util.cpp @@ -150,9 +150,6 @@ Status cast_column(const ColumnWithTypeAndName& arg, const DataTypePtr& type, Co type->get_name()); } Block tmp_block {arguments}; - vectorized::ColumnNumbers argnum; - argnum.emplace_back(0); - argnum.emplace_back(1); size_t result_column = tmp_block.columns(); auto ctx = FunctionContext::create_context(nullptr, {}, {}); // We convert column string to jsonb type just add a string jsonb field to dst column instead of parse @@ -160,32 +157,8 @@ Status cast_column(const ColumnWithTypeAndName& arg, const DataTypePtr& type, Co ctx->set_string_as_jsonb_string(true); tmp_block.insert({nullptr, type, arg.name}); RETURN_IF_ERROR( - function->execute(ctx.get(), tmp_block, argnum, result_column, arg.column->size())); - *result = std::move(tmp_block.get_by_position(result_column).column); - // Variant column is a really special case, src type is nullable but dst variant type is none nullable, - // but we still need to wrap nullmap into variant root column to prevent from nullable info lost. - // TODO rethink and better handle this sepecial situation - if (arg.type->is_nullable() && WhichDataType(remove_nullable(type)).is_variant_type()) { - auto variant = ColumnObject::create(true); - auto& old_variant = - (*result)->is_nullable() - ? 
assert_cast( - assert_cast(**result).get_nested_column()) - : assert_cast(*(*result)->assume_mutable()); - DCHECK(!old_variant.get_root()->is_nullable()); - auto nullable = ColumnNullable::create( - old_variant.get_root(), - assert_cast(*arg.column).get_null_map_column_ptr()); - variant->create_root(make_nullable(arg.type), nullable->assume_mutable()); - if ((*result)->is_nullable()) { - *result = ColumnNullable::create(std::move(variant), - assert_cast(*arg.column) - .get_null_map_column_ptr() - ->clone_resized(nullable->size())); - } else { - *result = std::move(variant); - } - } + function->execute(ctx.get(), tmp_block, {0}, result_column, arg.column->size())); + *result = tmp_block.get_by_position(result_column).column->convert_to_full_column_if_const(); return Status::OK(); } @@ -422,6 +395,7 @@ Status parse_variant_columns(Block& block, const std::vector& variant_pos) bool is_nullable = column_ref->is_nullable(); const auto& column = remove_nullable(column_ref); const auto& var = assert_cast(*column.get()); + var.assume_mutable_ref().finalize(); if (!var.is_scalar_variant()) { // already parsed continue; @@ -431,12 +405,19 @@ Status parse_variant_columns(Block& block, const std::vector& variant_pos) // TODO more efficient way to parse jsonb type, currently we just convert jsonb to // json str and parse them into variant RETURN_IF_ERROR(cast_column({var.get_root(), var.get_root_type(), ""}, - std::make_shared(), &raw_json_column)); + var.get_root()->is_nullable() + ? make_nullable(std::make_shared()) + : std::make_shared(), + &raw_json_column)); + if (raw_json_column->is_nullable()) { + raw_json_column = assert_cast(raw_json_column.get()) + ->get_nested_column_ptr(); + } } else { const auto& root = *var.get_root(); raw_json_column = root.is_nullable() - ? static_cast(root).get_nested_column_ptr() + ? 
assert_cast(root).get_nested_column_ptr() : var.get_root(); } diff --git a/be/src/vec/common/sort/partition_sorter.cpp b/be/src/vec/common/sort/partition_sorter.cpp index 083c676ba8c1ff7..a3954aa3f55c67e 100644 --- a/be/src/vec/common/sort/partition_sorter.cpp +++ b/be/src/vec/common/sort/partition_sorter.cpp @@ -66,7 +66,7 @@ Status PartitionSorter::prepare_for_read() { auto& cursors = _state->get_cursors(); auto& blocks = _state->get_sorted_block(); auto& priority_queue = _state->get_priority_queue(); - for (const auto& block : blocks) { + for (auto& block : blocks) { cursors.emplace_back(block, _sort_description); } for (auto& cursor : cursors) { diff --git a/be/src/vec/common/sort/sorter.cpp b/be/src/vec/common/sort/sorter.cpp index bd095e1afc561c4..644db862f063e66 100644 --- a/be/src/vec/common/sort/sorter.cpp +++ b/be/src/vec/common/sort/sorter.cpp @@ -99,12 +99,14 @@ Status MergeSorterState::add_sorted_block(Block& block) { } void MergeSorterState::_build_merge_tree_not_spilled(const SortDescription& sort_description) { - for (const auto& block : sorted_blocks_) { + for (auto& block : sorted_blocks_) { cursors_.emplace_back(block, sort_description); } if (sorted_blocks_.size() > 1) { - for (auto& cursor : cursors_) priority_queue_.push(MergeSortCursor(&cursor)); + for (auto& cursor : cursors_) { + priority_queue_.emplace(&cursor); + } } } diff --git a/be/src/vec/common/sort/sorter.h b/be/src/vec/common/sort/sorter.h index e372e6367d86438..e0a2b92ceed5a39 100644 --- a/be/src/vec/common/sort/sorter.h +++ b/be/src/vec/common/sort/sorter.h @@ -88,7 +88,7 @@ class MergeSorterState { bool is_spilled() const { return is_spilled_; } - const Block& last_sorted_block() const { return sorted_blocks_.back(); } + Block& last_sorted_block() { return sorted_blocks_.back(); } std::vector& get_sorted_block() { return sorted_blocks_; } std::priority_queue& get_priority_queue() { return priority_queue_; } diff --git a/be/src/vec/common/typeid_cast.h b/be/src/vec/common/typeid_cast.h index fefd38409fadfeb..85f99b492cdeb95 100644 --- a/be/src/vec/common/typeid_cast.h +++ b/be/src/vec/common/typeid_cast.h @@ -59,13 +59,18 @@ To typeid_cast(From& from) { template To typeid_cast(From* from) { +#ifndef NDEBUG try { if (typeid(*from) == typeid(std::remove_pointer_t)) { return static_cast(from); - } else { - return nullptr; } } catch (const std::exception& e) { throw doris::Exception(doris::ErrorCode::BAD_CAST, e.what()); } +#else + if (typeid(*from) == typeid(std::remove_pointer_t)) { + return static_cast(from); + } +#endif + return nullptr; } diff --git a/be/src/vec/core/block.cpp b/be/src/vec/core/block.cpp index 80a10c528ebb149..e3ce4885cb08af4 100644 --- a/be/src/vec/core/block.cpp +++ b/be/src/vec/core/block.cpp @@ -192,18 +192,6 @@ void Block::insert(ColumnWithTypeAndName&& elem) { data.emplace_back(std::move(elem)); } -void Block::insert_unique(const ColumnWithTypeAndName& elem) { - if (index_by_name.end() == index_by_name.find(elem.name)) { - insert(elem); - } -} - -void Block::insert_unique(ColumnWithTypeAndName&& elem) { - if (index_by_name.end() == index_by_name.find(elem.name)) { - insert(std::move(elem)); - } -} - void Block::erase(const std::set& positions) { for (unsigned long position : std::ranges::reverse_view(positions)) { erase(position); @@ -415,9 +403,9 @@ size_t Block::bytes() const { for (const auto& e : data) { ss << e.name + " "; } - LOG(FATAL) << fmt::format( - "Column {} in block is nullptr, in method bytes. 
All Columns are {}", elem.name, - ss.str()); + throw Exception(ErrorCode::INTERNAL_ERROR, + "Column {} in block is nullptr, in method bytes. All Columns are {}", + elem.name, ss.str()); } res += elem.column->byte_size(); } @@ -433,9 +421,9 @@ size_t Block::allocated_bytes() const { for (const auto& e : data) { ss << e.name + " "; } - LOG(FATAL) << fmt::format( - "Column {} in block is nullptr, in method allocated_bytes. All Columns are {}", - elem.name, ss.str()); + throw Exception(ErrorCode::INTERNAL_ERROR, + "Column {} in block is nullptr, in method allocated_bytes. All Columns are {}", + elem.name, ss.str()); } res += elem.column->allocated_bytes(); } @@ -568,6 +556,16 @@ Columns Block::get_columns() const { size_t num_columns = data.size(); Columns columns(num_columns); for (size_t i = 0; i < num_columns; ++i) { + columns[i] = data[i].column->convert_to_full_column_if_const(); + } + return columns; +} + +Columns Block::get_columns_and_convert() { + size_t num_columns = data.size(); + Columns columns(num_columns); + for (size_t i = 0; i < num_columns; ++i) { + data[i].column = data[i].column->convert_to_full_column_if_const(); columns[i] = data[i].column; } return columns; diff --git a/be/src/vec/core/block.h b/be/src/vec/core/block.h index 8433ebf074cbb7d..ec9e3835222f4c3 100644 --- a/be/src/vec/core/block.h +++ b/be/src/vec/core/block.h @@ -105,9 +105,6 @@ class Block { /// insert the column to the end void insert(const ColumnWithTypeAndName& elem); void insert(ColumnWithTypeAndName&& elem); - /// insert the column to the end, if there is no column with that name yet - void insert_unique(const ColumnWithTypeAndName& elem); - void insert_unique(ColumnWithTypeAndName&& elem); /// remove the column at the specified position void erase(size_t position); /// remove the column at the [start, end) @@ -214,6 +211,8 @@ class Block { Block clone_empty() const; Columns get_columns() const; + Columns get_columns_and_convert(); + void set_columns(const Columns& columns); Block clone_with_columns(const Columns& columns) const; Block clone_without_columns() const; diff --git a/be/src/vec/core/column_with_type_and_name.cpp b/be/src/vec/core/column_with_type_and_name.cpp index 9ac2bbe6e4476be..cd0f7194004073c 100644 --- a/be/src/vec/core/column_with_type_and_name.cpp +++ b/be/src/vec/core/column_with_type_and_name.cpp @@ -30,6 +30,7 @@ #include "vec/columns/column.h" #include "vec/core/types.h" #include "vec/data_types/data_type.h" +#include "vec/data_types/data_type_nullable.h" namespace doris::vectorized { @@ -87,4 +88,28 @@ void ColumnWithTypeAndName::to_pb_column_meta(PColumnMeta* col_meta) const { type->to_pb_column_meta(col_meta); } +ColumnWithTypeAndName ColumnWithTypeAndName::get_nested(bool replace_null_data_to_default) const { + if (type->is_nullable()) { + auto nested_type = assert_cast(type.get())->get_nested_type(); + ColumnPtr nested_column = column; + if (column) { + nested_column = nested_column->convert_to_full_column_if_const(); + const auto* source_column = assert_cast(nested_column.get()); + nested_column = source_column->get_nested_column_ptr(); + + if (replace_null_data_to_default) { + const auto& null_map = source_column->get_null_map_data(); + // only need to mutate nested column, avoid to copy nullmap + auto mutable_nested_col = (*std::move(nested_column)).mutate(); + mutable_nested_col->replace_column_null_data(null_map.data()); + + return {std::move(mutable_nested_col), nested_type, ""}; + } + } + return {nested_column, nested_type, ""}; + } else { + return {column, type, ""}; + } +}
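A minimal usage sketch for the new ColumnWithTypeAndName::get_nested() helper added above, assuming a Block named block that holds a real nullable argument column at position arg (surrounding names are illustrative, not part of the patch):

    // Unwrap the nullable argument; optionally overwrite null slots with default
    // values so a kernel can run over the nested data without branching on the null map.
    ColumnWithTypeAndName nested =
            block.get_by_position(arg).get_nested(/*replace_null_data_to_default=*/true);
    DCHECK(!nested.column->is_nullable());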
+ } // namespace doris::vectorized diff --git a/be/src/vec/core/column_with_type_and_name.h b/be/src/vec/core/column_with_type_and_name.h index caf68f46260db1a..53ca6f20b2dd6db 100644 --- a/be/src/vec/core/column_with_type_and_name.h +++ b/be/src/vec/core/column_with_type_and_name.h @@ -25,6 +25,7 @@ #include #include #include +#include #include "vec/core/types.h" #include "vec/data_types/data_type.h" @@ -47,13 +48,13 @@ struct ColumnWithTypeAndName { DataTypePtr type; String name; - ColumnWithTypeAndName() {} - ColumnWithTypeAndName(const ColumnPtr& column_, const DataTypePtr& type_, const String& name_) - : column(column_), type(type_), name(name_) {} + ColumnWithTypeAndName() = default; + ColumnWithTypeAndName(ColumnPtr column_, DataTypePtr type_, String name_) + : column(std::move(column_)), type(std::move(type_)), name(std::move(name_)) {} /// Uses type->create_column() to create column - ColumnWithTypeAndName(const DataTypePtr& type_, const String& name_) - : column(type_->create_column()), type(type_), name(name_) {} + ColumnWithTypeAndName(const DataTypePtr& type_, String name_) + : column(type_->create_column()), type(type_), name(std::move(name_)) {} ColumnWithTypeAndName clone_empty() const; bool operator==(const ColumnWithTypeAndName& other) const; @@ -63,6 +64,8 @@ struct ColumnWithTypeAndName { std::string to_string(size_t row_num) const; void to_pb_column_meta(PColumnMeta* col_meta) const; + + ColumnWithTypeAndName get_nested(bool replace_null_data_to_default = false) const; }; } // namespace doris::vectorized diff --git a/be/src/vec/core/sort_cursor.h b/be/src/vec/core/sort_cursor.h index b43449b50b04724..9e40f40cf18c453 100644 --- a/be/src/vec/core/sort_cursor.h +++ b/be/src/vec/core/sort_cursor.h @@ -47,7 +47,7 @@ struct HeapSortCursorBlockView { private: void _reset() { sort_columns.clear(); - auto columns = block.get_columns(); + auto columns = block.get_columns_and_convert(); for (size_t j = 0, size = desc.size(); j < size; ++j) { auto& column_desc = desc[j]; size_t column_number = !column_desc.column_name.empty() @@ -161,7 +161,7 @@ struct MergeSortCursorImpl { MergeSortCursorImpl() = default; virtual ~MergeSortCursorImpl() = default; - MergeSortCursorImpl(const Block& block, const SortDescription& desc_) + MergeSortCursorImpl(Block& block, const SortDescription& desc_) : desc(desc_), sort_columns_size(desc.size()) { reset(block); } @@ -171,13 +171,11 @@ struct MergeSortCursorImpl { bool empty() const { return rows == 0; } /// Set the cursor to the beginning of the new block. - void reset(const Block& block) { reset(block.get_columns(), block); } - - /// Set the cursor to the beginning of the new block. - void reset(const Columns& columns, const Block& block) { + void reset(Block& block) { all_columns.clear(); sort_columns.clear(); + auto columns = block.get_columns_and_convert(); size_t num_columns = columns.size(); for (size_t j = 0; j < num_columns; ++j) { diff --git a/be/src/vec/data_types/data_type.h b/be/src/vec/data_types/data_type.h index 7807f2a5d587fb4..8fa41f88886a180 100644 --- a/be/src/vec/data_types/data_type.h +++ b/be/src/vec/data_types/data_type.h @@ -199,10 +199,6 @@ class IDataType : private boost::noncopyable { virtual bool is_nullable() const { return false; } - /** Is this type can represent only NULL value? 
(It also implies is_nullable) - */ - virtual bool only_null() const { return false; } - /* the data type create from type_null, NULL literal*/ virtual bool is_null_literal() const { return false; } diff --git a/be/src/vec/data_types/data_type_nullable.cpp b/be/src/vec/data_types/data_type_nullable.cpp index 1e4ecb1cf29f437..f160a1f323a8ce1 100644 --- a/be/src/vec/data_types/data_type_nullable.cpp +++ b/be/src/vec/data_types/data_type_nullable.cpp @@ -49,10 +49,6 @@ DataTypeNullable::DataTypeNullable(const DataTypePtr& nested_data_type_) } } -bool DataTypeNullable::only_null() const { - return typeid_cast(nested_data_type.get()); -} - std::string DataTypeNullable::to_string(const IColumn& column, size_t row_num) const { auto result = check_column_const_set_readability(column, row_num); ColumnPtr ptr = result.first; diff --git a/be/src/vec/data_types/data_type_nullable.h b/be/src/vec/data_types/data_type_nullable.h index fc958096adfe94a..12410b70bd13223 100644 --- a/be/src/vec/data_types/data_type_nullable.h +++ b/be/src/vec/data_types/data_type_nullable.h @@ -109,7 +109,6 @@ class DataTypeNullable final : public IDataType { } bool is_nullable() const override { return true; } size_t get_size_of_value_in_memory() const override; - bool only_null() const override; bool can_be_inside_low_cardinality() const override { return nested_data_type->can_be_inside_low_cardinality(); } diff --git a/be/src/vec/data_types/get_least_supertype.cpp b/be/src/vec/data_types/get_least_supertype.cpp index 49c5bacce10f77f..8d5662b9bed801a 100644 --- a/be/src/vec/data_types/get_least_supertype.cpp +++ b/be/src/vec/data_types/get_least_supertype.cpp @@ -291,9 +291,7 @@ void get_least_supertype(const DataTypes& types, DataTypePtr* type) { typeid_cast(type.get())) { have_nullable = true; - if (!type_nullable->only_null()) { - nested_types.emplace_back(type_nullable->get_nested_type()); - } + nested_types.emplace_back(type_nullable->get_nested_type()); } else { nested_types.emplace_back(type); } diff --git a/be/src/vec/exec/join/vhash_join_node.cpp b/be/src/vec/exec/join/vhash_join_node.cpp index c65513c807cf51c..94cb5be876f941a 100644 --- a/be/src/vec/exec/join/vhash_join_node.cpp +++ b/be/src/vec/exec/join/vhash_join_node.cpp @@ -662,7 +662,7 @@ Status HashJoinNode::alloc_resource(doris::RuntimeState* state) { SCOPED_TIMER(_allocate_resource_timer); RETURN_IF_ERROR(VJoinNodeBase::alloc_resource(state)); for (size_t i = 0; i < _runtime_filter_descs.size(); i++) { - if (auto bf = _runtime_filters[i]->get_bloomfilter()) { + if (auto* bf = _runtime_filters[i]->get_bloomfilter()) { RETURN_IF_ERROR(bf->init_with_fixed_length()); } } @@ -751,23 +751,8 @@ Status HashJoinNode::sink(doris::RuntimeState* state, vectorized::Block* in_bloc DCHECK(!_build_side_mutable_block.empty()); _build_block = std::make_shared(_build_side_mutable_block.to_block()); COUNTER_UPDATE(_build_blocks_memory_usage, _build_block->bytes()); + RETURN_IF_ERROR(process_runtime_filter_build(state, _build_block.get(), this)); RETURN_IF_ERROR(_process_build_block(state, *_build_block)); - auto ret = std::visit(Overload {[&](std::monostate&) -> Status { - LOG(FATAL) << "FATAL: uninited hash table"; - __builtin_unreachable(); - }, - [&](auto&& arg) -> Status { - ProcessRuntimeFilterBuild runtime_filter_build_process; - return runtime_filter_build_process(state, arg, this); - }}, - *_hash_table_variants); - if (!ret.ok()) { - if (_shared_hashtable_controller) { - _shared_hash_table_context->status = ret; - _shared_hashtable_controller->signal(id()); - } - 
return ret; - } if (_shared_hashtable_controller) { _shared_hash_table_context->status = Status::OK(); // arena will be shared with other instances. @@ -949,9 +934,6 @@ void HashJoinNode::_set_build_ignore_flag(Block& block, const std::vector& Status HashJoinNode::_process_build_block(RuntimeState* state, Block& block) { SCOPED_TIMER(_build_table_timer); size_t rows = block.rows(); - if (UNLIKELY(rows == 0)) { - return Status::OK(); - } COUNTER_UPDATE(_build_rows_counter, rows); ColumnRawPtrs raw_ptrs(_build_expr_ctxs.size()); diff --git a/be/src/vec/exec/join/vhash_join_node.h b/be/src/vec/exec/join/vhash_join_node.h index be94dacdcaef3b5..b9b3d18dff7198a 100644 --- a/be/src/vec/exec/join/vhash_join_node.h +++ b/be/src/vec/exec/join/vhash_join_node.h @@ -74,33 +74,28 @@ template struct ProcessHashTableProbe; class HashJoinNode; -struct ProcessRuntimeFilterBuild { - template - Status operator()(RuntimeState* state, HashTableContext& hash_table_ctx, Parent* parent, - bool is_global = false) { - if (parent->runtime_filter_descs().empty()) { - return Status::OK(); - } - parent->_runtime_filter_slots = std::make_shared( - parent->_build_expr_ctxs, parent->runtime_filter_descs(), is_global); - - RETURN_IF_ERROR( - parent->_runtime_filter_slots->init(state, hash_table_ctx.hash_table->size())); +template +Status process_runtime_filter_build(RuntimeState* state, Block* block, Parent* parent, + bool is_global = false) { + if (parent->runtime_filter_descs().empty()) { + return Status::OK(); + } + parent->_runtime_filter_slots = std::make_shared( + parent->_build_expr_ctxs, parent->runtime_filter_descs(), is_global); - if (!parent->_runtime_filter_slots->empty() && !parent->_inserted_blocks.empty()) { - { - SCOPED_TIMER(parent->_runtime_filter_compute_timer); - parent->_runtime_filter_slots->insert(parent->_inserted_blocks); - } - } - { - SCOPED_TIMER(parent->_publish_runtime_filter_timer); - RETURN_IF_ERROR(parent->_runtime_filter_slots->publish()); - } + RETURN_IF_ERROR(parent->_runtime_filter_slots->init(state, block->rows())); - return Status::OK(); + if (!parent->_runtime_filter_slots->empty() && block->rows() > 1) { + SCOPED_TIMER(parent->_runtime_filter_compute_timer); + parent->_runtime_filter_slots->insert(block); + } + { + SCOPED_TIMER(parent->_publish_runtime_filter_timer); + RETURN_IF_ERROR(parent->_runtime_filter_slots->publish()); } -}; + + return Status::OK(); +} using ProfileCounter = RuntimeProfile::Counter; @@ -129,10 +124,6 @@ struct ProcessHashTableBuild { } } - if (!_parent->runtime_filter_descs().empty()) { - _parent->_inserted_blocks.insert(&_acquired_block); - } - SCOPED_TIMER(_parent->_build_table_insert_timer); hash_table_ctx.hash_table->template prepare_build(_rows, _batch_size, *has_null_key); @@ -414,10 +405,11 @@ class HashJoinNode final : public VJoinNodeBase { template friend struct ProcessHashTableProbe; - friend struct ProcessRuntimeFilterBuild; + template + friend Status process_runtime_filter_build(RuntimeState* state, vectorized::Block* block, + Parent* parent, bool is_global); std::vector _runtime_filter_descs; - std::unordered_set _inserted_blocks; std::vector _runtime_filters; std::atomic_bool _probe_open_finish = false; diff --git a/be/src/vec/exec/scan/new_olap_scan_node.cpp b/be/src/vec/exec/scan/new_olap_scan_node.cpp index 8ac1af004d90a44..e1f39f2948b3145 100644 --- a/be/src/vec/exec/scan/new_olap_scan_node.cpp +++ b/be/src/vec/exec/scan/new_olap_scan_node.cpp @@ -80,21 +80,6 @@ NewOlapScanNode::NewOlapScanNode(ObjectPool* pool, const TPlanNode& tnode, } } 
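With ProcessRuntimeFilterBuild replaced by the free template above, building runtime filters no longer needs to dispatch on the hash-table variant; the merged build block alone is enough to size, fill, and publish the filters. A condensed sketch of the call order in HashJoinNode::sink(), mirroring the hunk above:

    // Build side has been merged into a single block: size the filters from its
    // row count, insert the build keys, publish, then populate the hash table.
    RETURN_IF_ERROR(process_runtime_filter_build(state, _build_block.get(), this));
    RETURN_IF_ERROR(_process_build_block(state, *_build_block));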
-Status NewOlapScanNode::collect_query_statistics(QueryStatistics* statistics) { - RETURN_IF_ERROR(ExecNode::collect_query_statistics(statistics)); - if (!_is_pipeline_scan || _should_create_scanner) { - statistics->add_scan_bytes(_byte_read_counter->value()); - statistics->add_scan_rows(_rows_read_counter->value()); - statistics->add_cpu_ms(_scan_cpu_timer->value() / NANOS_PER_MILLIS); - } - return Status::OK(); -} - -Status NewOlapScanNode::collect_query_statistics(QueryStatistics* statistics, int) { - RETURN_IF_ERROR(collect_query_statistics(statistics)); - return Status::OK(); -} - Status NewOlapScanNode::prepare(RuntimeState* state) { RETURN_IF_ERROR(VScanNode::prepare(state)); // if you want to add some profile in scan node, even it have not new VScanner object diff --git a/be/src/vec/exec/scan/new_olap_scan_node.h b/be/src/vec/exec/scan/new_olap_scan_node.h index 309bac56991dd04..ca357b7eb7df1d3 100644 --- a/be/src/vec/exec/scan/new_olap_scan_node.h +++ b/be/src/vec/exec/scan/new_olap_scan_node.h @@ -63,8 +63,6 @@ class NewOlapScanNode : public VScanNode { friend class doris::pipeline::OlapScanOperator; Status prepare(RuntimeState* state) override; - Status collect_query_statistics(QueryStatistics* statistics) override; - Status collect_query_statistics(QueryStatistics* statistics, int sender_id) override; void set_scan_ranges(RuntimeState* state, const std::vector& scan_ranges) override; diff --git a/be/src/vec/exec/scan/vscan_node.cpp b/be/src/vec/exec/scan/vscan_node.cpp index 2a56a733b48aecf..cf52341b34f2dc7 100644 --- a/be/src/vec/exec/scan/vscan_node.cpp +++ b/be/src/vec/exec/scan/vscan_node.cpp @@ -1332,6 +1332,9 @@ Status VScanNode::_prepare_scanners(const int query_parallel_instance_num) { if (scanners.empty()) { _eos = true; } else { + for (auto& scanner : scanners) { + scanner->set_query_statistics(_query_statistics.get()); + } COUNTER_SET(_num_scanners, static_cast(scanners.size())); _start_scanners(_scanners, query_parallel_instance_num); } diff --git a/be/src/vec/exec/scan/vscanner.cpp b/be/src/vec/exec/scan/vscanner.cpp index 0a7a8c9c019c09f..2cdd1d503bbc8b2 100644 --- a/be/src/vec/exec/scan/vscanner.cpp +++ b/be/src/vec/exec/scan/vscanner.cpp @@ -102,6 +102,8 @@ Status VScanner::get_block(RuntimeState* state, Block* block, bool* eof) { } } + int64_t old_scan_rows = _num_rows_read; + int64_t old_scan_bytes = _num_byte_read; { do { // if step 2 filter all rows of block, and block will be reused to get next rows, @@ -133,6 +135,11 @@ Status VScanner::get_block(RuntimeState* state, Block* block, bool* eof) { _num_rows_read < rows_read_threshold); } + if (_query_statistics) { + _query_statistics->add_scan_rows(_num_rows_read - old_scan_rows); + _query_statistics->add_scan_bytes(_num_byte_read - old_scan_bytes); + } + if (state->is_cancelled()) { return Status::Cancelled("cancelled"); } diff --git a/be/src/vec/exec/scan/vscanner.h b/be/src/vec/exec/scan/vscanner.h index 9fdaddcea216531..23ddb65629cd064 100644 --- a/be/src/vec/exec/scan/vscanner.h +++ b/be/src/vec/exec/scan/vscanner.h @@ -32,6 +32,7 @@ namespace doris { class RuntimeProfile; class TupleDescriptor; +class QueryStatistics; namespace vectorized { class VExprContext; @@ -148,6 +149,10 @@ class VScanner { void set_status_on_failure(const Status& st) { _status = st; } + void set_query_statistics(QueryStatistics* query_statistics) { + _query_statistics = query_statistics; + } + protected: void _discard_conjuncts() { for (auto& conjunct : _conjuncts) { @@ -159,6 +164,8 @@ class VScanner { RuntimeState* _state = 
nullptr; VScanNode* _parent = nullptr; pipeline::ScanLocalStateBase* _local_state = nullptr; + QueryStatistics* _query_statistics = nullptr; + // Set if scan node has sort limit info int64_t _limit = -1; diff --git a/be/src/vec/exec/vexchange_node.cpp b/be/src/vec/exec/vexchange_node.cpp index 6baf6749ed1d335..5b3e38af56a120a 100644 --- a/be/src/vec/exec/vexchange_node.cpp +++ b/be/src/vec/exec/vexchange_node.cpp @@ -63,12 +63,11 @@ Status VExchangeNode::prepare(RuntimeState* state) { RETURN_IF_ERROR(ExecNode::prepare(state)); SCOPED_TIMER(_exec_timer); DCHECK_GT(_num_senders, 0); - _sub_plan_query_statistics_recvr.reset(new QueryStatisticsRecvr()); CHECK(state->exec_env() != nullptr); CHECK(state->exec_env()->vstream_mgr() != nullptr); _stream_recvr = state->exec_env()->vstream_mgr()->create_recvr( state, _input_row_desc, state->fragment_instance_id(), _id, _num_senders, - _runtime_profile.get(), _is_merging, _sub_plan_query_statistics_recvr); + _runtime_profile.get(), _is_merging); if (_is_merging) { RETURN_IF_ERROR(_vsort_exec_exprs.prepare(state, _row_descriptor, _row_descriptor)); @@ -145,20 +144,6 @@ void VExchangeNode::release_resource(RuntimeState* state) { ExecNode::release_resource(state); } -Status VExchangeNode::collect_query_statistics(QueryStatistics* statistics) { - RETURN_IF_ERROR(ExecNode::collect_query_statistics(statistics)); - if (!statistics->collect_dml_statistics()) { - statistics->merge(_sub_plan_query_statistics_recvr.get()); - } - return Status::OK(); -} -Status VExchangeNode::collect_query_statistics(QueryStatistics* statistics, int sender_id) { - RETURN_IF_ERROR(ExecNode::collect_query_statistics(statistics)); - if (!statistics->collect_dml_statistics()) { - statistics->merge(_sub_plan_query_statistics_recvr.get(), sender_id); - } - return Status::OK(); -} Status VExchangeNode::close(RuntimeState* state) { if (is_closed()) { return Status::OK(); diff --git a/be/src/vec/exec/vexchange_node.h b/be/src/vec/exec/vexchange_node.h index 94302e84d9bdaca..e49eb86a92c1106 100644 --- a/be/src/vec/exec/vexchange_node.h +++ b/be/src/vec/exec/vexchange_node.h @@ -32,7 +32,6 @@ namespace doris { class DorisNodesInfo; class ObjectPool; class QueryStatistics; -class QueryStatisticsRecvr; class RuntimeState; class TPlanNode; @@ -55,8 +54,6 @@ class VExchangeNode : public ExecNode { Status open(RuntimeState* state) override; Status get_next(RuntimeState* state, Block* row_batch, bool* eos) override; void release_resource(RuntimeState* state) override; - Status collect_query_statistics(QueryStatistics* statistics) override; - Status collect_query_statistics(QueryStatistics* statistics, int sender_id) override; Status close(RuntimeState* state) override; void set_num_senders(int num_senders) { _num_senders = num_senders; } @@ -67,7 +64,6 @@ class VExchangeNode : public ExecNode { bool _is_ready; std::shared_ptr _stream_recvr; RowDescriptor _input_row_desc; - std::shared_ptr _sub_plan_query_statistics_recvr; // use in merge sort size_t _offset; diff --git a/be/src/vec/exec/vjdbc_connector.cpp b/be/src/vec/exec/vjdbc_connector.cpp index 56205d2ffa9b7ac..c80d84f98be8b69 100644 --- a/be/src/vec/exec/vjdbc_connector.cpp +++ b/be/src/vec/exec/vjdbc_connector.cpp @@ -81,7 +81,7 @@ Status JdbcConnector::close(Status /*unused*/) { return Status::OK(); } if (_is_in_transaction) { - static_cast(abort_trans()); + RETURN_IF_ERROR(abort_trans()); } JNIEnv* env; RETURN_IF_ERROR(JniUtil::GetJNIEnv(&env)); @@ -117,7 +117,7 @@ Status JdbcConnector::open(RuntimeState* state, bool read) { { 
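The scanner changes above replace the removed collect_query_statistics() overrides with direct accounting: the scan node hands its QueryStatistics pointer to every scanner, and each scanner adds only the delta produced by one get_block() call. A rough sketch of the two sides, using the member names from the hunks above:

    // VScanNode::_prepare_scanners(): share the node-level statistics object.
    for (auto& scanner : scanners) {
        scanner->set_query_statistics(_query_statistics.get());
    }

    // VScanner::get_block(): record only what this call actually read.
    if (_query_statistics) {
        _query_statistics->add_scan_rows(_num_rows_read - old_scan_rows);
        _query_statistics->add_scan_bytes(_num_byte_read - old_scan_bytes);
    }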
std::string local_location; std::hash hash_str; - auto function_cache = UserFunctionCache::instance(); + auto* function_cache = UserFunctionCache::instance(); if (_conn_param.resource_name.empty()) { // for jdbcExternalTable, _conn_param.resource_name == "" // so, we use _conn_param.driver_path as key of jarpath @@ -164,7 +164,7 @@ Status JdbcConnector::open(RuntimeState* state, bool read) { RETURN_ERROR_IF_EXC(env); RETURN_IF_ERROR(JniUtil::LocalToGlobalRef(env, _executor_obj, &_executor_obj)); _is_open = true; - static_cast(begin_trans()); + RETURN_IF_ERROR(begin_trans()); return Status::OK(); } @@ -622,11 +622,11 @@ Status JdbcConnector::_cast_string_to_special(Block* block, JNIEnv* env, size_t RETURN_IF_ERROR(JniUtil::GetJniExceptionMsg(env)); if (slot_desc->type().is_hll_type()) { - static_cast(_cast_string_to_hll(slot_desc, block, column_index, num_rows)); + RETURN_IF_ERROR(_cast_string_to_hll(slot_desc, block, column_index, num_rows)); } else if (slot_desc->type().is_json_type()) { - static_cast(_cast_string_to_json(slot_desc, block, column_index, num_rows)); + RETURN_IF_ERROR(_cast_string_to_json(slot_desc, block, column_index, num_rows)); } else if (slot_desc->type().is_bitmap_type()) { - static_cast(_cast_string_to_bitmap(slot_desc, block, column_index, num_rows)); + RETURN_IF_ERROR(_cast_string_to_bitmap(slot_desc, block, column_index, num_rows)); } } return Status::OK(); @@ -654,7 +654,7 @@ Status JdbcConnector::_cast_string_to_hll(const SlotDescriptor* slot_desc, Block Block cast_block(argument_template); int result_idx = cast_block.columns(); cast_block.insert({nullptr, make_nullable(_target_data_type), "cast_result"}); - static_cast(func_cast->execute(nullptr, cast_block, {0, 1}, result_idx, rows)); + RETURN_IF_ERROR(func_cast->execute(nullptr, cast_block, {0}, result_idx, rows)); auto res_col = cast_block.get_by_position(result_idx).column; block->get_by_position(column_index).type = _target_data_type; @@ -691,7 +691,7 @@ Status JdbcConnector::_cast_string_to_bitmap(const SlotDescriptor* slot_desc, Bl Block cast_block(argument_template); int result_idx = cast_block.columns(); cast_block.insert({nullptr, make_nullable(_target_data_type), "cast_result"}); - static_cast(func_cast->execute(nullptr, cast_block, {0, 1}, result_idx, rows)); + RETURN_IF_ERROR(func_cast->execute(nullptr, cast_block, {0}, result_idx, rows)); auto res_col = cast_block.get_by_position(result_idx).column; block->get_by_position(column_index).type = _target_data_type; @@ -728,7 +728,7 @@ Status JdbcConnector::_cast_string_to_json(const SlotDescriptor* slot_desc, Bloc Block cast_block(argument_template); int result_idx = cast_block.columns(); cast_block.insert({nullptr, make_nullable(_target_data_type), "cast_result"}); - static_cast(func_cast->execute(nullptr, cast_block, {0, 1}, result_idx, rows)); + RETURN_IF_ERROR(func_cast->execute(nullptr, cast_block, {0}, result_idx, rows)); auto res_col = cast_block.get_by_position(result_idx).column; block->get_by_position(column_index).type = _target_data_type; diff --git a/be/src/vec/exprs/vcast_expr.cpp b/be/src/vec/exprs/vcast_expr.cpp index 47733a177db238b..3207ba5b5419a6c 100644 --- a/be/src/vec/exprs/vcast_expr.cpp +++ b/be/src/vec/exprs/vcast_expr.cpp @@ -106,9 +106,6 @@ doris::Status VCastExpr::execute(VExprContext* context, doris::vectorized::Block int column_id = 0; RETURN_IF_ERROR(_children[0]->execute(context, block, &column_id)); - size_t const_param_id = VExpr::insert_param( - block, {_cast_param, _cast_param_data_type, _target_data_type_name}, 
block->rows()); - // call function size_t num_columns_without_result = block->columns(); // prepare a column to save result @@ -117,8 +114,8 @@ doris::Status VCastExpr::execute(VExprContext* context, doris::vectorized::Block auto state = Status::OK(); try { state = _function->execute(context->fn_context(_fn_context_index), *block, - {static_cast(column_id), const_param_id}, - num_columns_without_result, block->rows(), false); + {static_cast(column_id)}, num_columns_without_result, + block->rows(), false); *result_column_id = num_columns_without_result; } catch (const Exception& e) { state = e.to_status(); diff --git a/be/src/vec/functions/function.cpp b/be/src/vec/functions/function.cpp index 6e7f6572ab86672..8df03496c11fbe0 100644 --- a/be/src/vec/functions/function.cpp +++ b/be/src/vec/functions/function.cpp @@ -48,44 +48,36 @@ ColumnPtr wrap_in_nullable(const ColumnPtr& src, const Block& block, const Colum ColumnPtr src_not_nullable = src; MutableColumnPtr mutable_result_null_map_column; - if (auto* nullable = check_and_get_column(*src)) { + if (const auto* nullable = check_and_get_column(*src)) { src_not_nullable = nullable->get_nested_column_ptr(); result_null_map_column = nullable->get_null_map_column_ptr(); } for (const auto& arg : args) { const ColumnWithTypeAndName& elem = block.get_by_position(arg); - if (!elem.type->is_nullable()) { + if (!elem.type->is_nullable() || is_column_const(*elem.column)) { continue; } - bool is_const = is_column_const(*elem.column); - /// Const Nullable that are NULL. - if (is_const && assert_cast(elem.column.get())->only_null()) { - return block.get_by_position(result).type->create_column_const(input_rows_count, - Null()); - } - if (is_const) { - continue; - } - - if (auto* nullable = assert_cast(elem.column.get())) { + if (const auto* nullable = assert_cast(elem.column.get()); + nullable->has_null()) { const ColumnPtr& null_map_column = nullable->get_null_map_column_ptr(); if (!result_null_map_column) { result_null_map_column = null_map_column->clone_resized(input_rows_count); - } else { - if (!mutable_result_null_map_column) { - mutable_result_null_map_column = - std::move(result_null_map_column)->assume_mutable(); - } - - NullMap& result_null_map = - assert_cast(*mutable_result_null_map_column).get_data(); - const NullMap& src_null_map = - assert_cast(*null_map_column).get_data(); - - VectorizedUtils::update_null_map(result_null_map, src_null_map); + continue; } + + if (!mutable_result_null_map_column) { + mutable_result_null_map_column = + std::move(result_null_map_column)->assume_mutable(); + } + + NullMap& result_null_map = + assert_cast(*mutable_result_null_map_column).get_data(); + const NullMap& src_null_map = + assert_cast(*null_map_column).get_data(); + + VectorizedUtils::update_null_map(result_null_map, src_null_map); } } @@ -99,45 +91,22 @@ ColumnPtr wrap_in_nullable(const ColumnPtr& src, const Block& block, const Colum return ColumnNullable::create(src, ColumnUInt8::create(input_rows_count, 0)); } - return ColumnNullable::create(src_not_nullable->convert_to_full_column_if_const(), - result_null_map_column); + return ColumnNullable::create(src_not_nullable, result_null_map_column); } -NullPresence get_null_presence(const Block& block, const ColumnNumbers& args) { - NullPresence res; - - for (const auto& arg : args) { - const auto& elem = block.get_by_position(arg); - - if (!res.has_nullable) { - res.has_nullable = elem.type->is_nullable(); - } - if (!res.has_null_constant) { - res.has_null_constant = elem.type->only_null(); - } - } - - 
return res; +bool get_null_presence(const Block& block, const ColumnNumbers& args) { + return std::ranges::any_of(args, [&block](const auto& elem) { + return block.get_by_position(elem).type->is_nullable(); + }); } -[[maybe_unused]] NullPresence get_null_presence(const ColumnsWithTypeAndName& args) { - NullPresence res; - - for (const auto& elem : args) { - if (!res.has_nullable) { - res.has_nullable = elem.type->is_nullable(); - } - if (!res.has_null_constant) { - res.has_null_constant = elem.type->only_null(); - } - } - - return res; +bool get_null_presence(const ColumnsWithTypeAndName& args) { + return std::ranges::any_of(args, [](const auto& elem) { return elem.type->is_nullable(); }); } inline Status PreparedFunctionImpl::_execute_skipped_constant_deal( FunctionContext* context, Block& block, const ColumnNumbers& args, size_t result, - size_t input_rows_count, bool dry_run) { + size_t input_rows_count, bool dry_run) const { bool executed = false; RETURN_IF_ERROR(default_implementation_for_nulls(context, block, args, result, input_rows_count, dry_run, &executed)); @@ -154,7 +123,7 @@ inline Status PreparedFunctionImpl::_execute_skipped_constant_deal( Status PreparedFunctionImpl::default_implementation_for_constant_arguments( FunctionContext* context, Block& block, const ColumnNumbers& args, size_t result, - size_t input_rows_count, bool dry_run, bool* executed) { + size_t input_rows_count, bool dry_run, bool* executed) const { *executed = false; ColumnNumbers args_expect_const = get_arguments_that_are_always_constant(); @@ -218,45 +187,51 @@ Status PreparedFunctionImpl::default_implementation_for_constant_arguments( Status PreparedFunctionImpl::default_implementation_for_nulls( FunctionContext* context, Block& block, const ColumnNumbers& args, size_t result, - size_t input_rows_count, bool dry_run, bool* executed) { + size_t input_rows_count, bool dry_run, bool* executed) const { *executed = false; if (args.empty() || !use_default_implementation_for_nulls()) { return Status::OK(); } - NullPresence null_presence = get_null_presence(block, args); - - if (null_presence.has_null_constant) { + if (std::ranges::any_of(args, [&block](const auto& elem) { + return block.get_by_position(elem).column->only_null(); + })) { block.get_by_position(result).column = block.get_by_position(result).type->create_column_const(input_rows_count, Null()); *executed = true; return Status::OK(); } - if (null_presence.has_nullable) { - bool check_overflow_for_decimal = false; + if (get_null_presence(block, args)) { + bool need_to_default = need_replace_null_data_to_default(); if (context) { - check_overflow_for_decimal = context->check_overflow_for_decimal(); + need_to_default &= context->check_overflow_for_decimal(); + } + ColumnNumbers new_args; + for (auto arg : args) { + new_args.push_back(block.columns()); + block.insert(block.get_by_position(arg).get_nested(need_to_default)); + DCHECK(!block.get_by_position(new_args.back()).column->is_nullable()); } - auto [temporary_block, new_args, new_result] = create_block_with_nested_columns( - block, args, result, - check_overflow_for_decimal && need_replace_null_data_to_default()); - RETURN_IF_ERROR(execute_without_low_cardinality_columns( - context, temporary_block, new_args, new_result, temporary_block.rows(), dry_run)); - block.get_by_position(result).column = - wrap_in_nullable(temporary_block.get_by_position(new_result).column, block, args, - result, input_rows_count); + RETURN_IF_ERROR(execute_without_low_cardinality_columns(context, block, new_args, result, + 
block.rows(), dry_run)); + block.get_by_position(result).column = wrap_in_nullable( + block.get_by_position(result).column, block, args, result, input_rows_count); + + while (!new_args.empty()) { + block.erase(new_args.back()); + new_args.pop_back(); + } *executed = true; return Status::OK(); } - *executed = false; return Status::OK(); } Status PreparedFunctionImpl::execute_without_low_cardinality_columns( FunctionContext* context, Block& block, const ColumnNumbers& args, size_t result, - size_t input_rows_count, bool dry_run) { + size_t input_rows_count, bool dry_run) const { bool executed = false; RETURN_IF_ERROR(default_implementation_for_constant_arguments( @@ -270,7 +245,7 @@ Status PreparedFunctionImpl::execute_without_low_cardinality_columns( Status PreparedFunctionImpl::execute(FunctionContext* context, Block& block, const ColumnNumbers& args, size_t result, - size_t input_rows_count, bool dry_run) { + size_t input_rows_count, bool dry_run) const { return execute_without_low_cardinality_columns(context, block, args, result, input_rows_count, dry_run); } @@ -292,12 +267,7 @@ DataTypePtr FunctionBuilderImpl::get_return_type_without_low_cardinality( check_number_of_arguments(arguments.size()); if (!arguments.empty() && use_default_implementation_for_nulls()) { - NullPresence null_presence = get_null_presence(arguments); - - if (null_presence.has_null_constant) { - return make_nullable(std::make_shared()); - } - if (null_presence.has_nullable) { + if (get_null_presence(arguments)) { ColumnNumbers numbers(arguments.size()); std::iota(numbers.begin(), numbers.end(), 0); auto [nested_block, _] = diff --git a/be/src/vec/functions/function.h b/be/src/vec/functions/function.h index 1b4c9fe128c32a1..0ca2899d7481d5c 100644 --- a/be/src/vec/functions/function.h +++ b/be/src/vec/functions/function.h @@ -60,18 +60,13 @@ template auto has_variadic_argument_types(T&& arg) -> decltype(T::get_variadic_argument_types()) {}; void has_variadic_argument_types(...); -struct NullPresence { - bool has_nullable = false; - bool has_null_constant = false; -}; - template concept HasGetVariadicArgumentTypesImpl = requires(T t) { { t.get_variadic_argument_types_impl() } -> std::same_as; }; -NullPresence get_null_presence(const Block& block, const ColumnNumbers& args); -[[maybe_unused]] NullPresence get_null_presence(const ColumnsWithTypeAndName& args); +bool get_null_presence(const Block& block, const ColumnNumbers& args); +bool get_null_presence(const ColumnsWithTypeAndName& args); /// The simplest executable object. 
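In the reworked default_implementation_for_nulls above, the all-NULL short circuit is decided per column rather than per type: only an argument whose column is itself a single NULL value (ColumnNullable::only_null(), redefined earlier in this patch) forces a NULL constant result. A sketch of that check as used in the hunk above:

    // Short-circuit: any argument that is a literal NULL makes every result row NULL.
    if (std::ranges::any_of(args, [&block](const auto& arg) {
            return block.get_by_position(arg).column->only_null();
        })) {
        block.get_by_position(result).column =
                block.get_by_position(result).type->create_column_const(input_rows_count, Null());
        return Status::OK(); // nothing left to execute
    }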
/// Motivation: @@ -85,7 +80,7 @@ class IPreparedFunction { virtual String get_name() const = 0; virtual Status execute(FunctionContext* context, Block& block, const ColumnNumbers& arguments, - size_t result, size_t input_rows_count, bool dry_run) = 0; + size_t result, size_t input_rows_count, bool dry_run) const = 0; }; using PreparedFunctionPtr = std::shared_ptr; @@ -93,7 +88,7 @@ using PreparedFunctionPtr = std::shared_ptr; class PreparedFunctionImpl : public IPreparedFunction { public: Status execute(FunctionContext* context, Block& block, const ColumnNumbers& arguments, - size_t result, size_t input_rows_count, bool dry_run = false) final; + size_t result, size_t input_rows_count, bool dry_run = false) const final; /** If the function have non-zero number of arguments, * and if all arguments are constant, that we could automatically provide default implementation: @@ -111,7 +106,7 @@ class PreparedFunctionImpl : public IPreparedFunction { protected: virtual Status execute_impl_dry_run(FunctionContext* context, Block& block, const ColumnNumbers& arguments, size_t result, - size_t input_rows_count) { + size_t input_rows_count) const { return execute_impl(context, block, arguments, result, input_rows_count); } @@ -141,17 +136,18 @@ class PreparedFunctionImpl : public IPreparedFunction { private: Status default_implementation_for_nulls(FunctionContext* context, Block& block, const ColumnNumbers& args, size_t result, - size_t input_rows_count, bool dry_run, bool* executed); + size_t input_rows_count, bool dry_run, + bool* executed) const; Status default_implementation_for_constant_arguments(FunctionContext* context, Block& block, const ColumnNumbers& args, size_t result, size_t input_rows_count, bool dry_run, - bool* executed); + bool* executed) const; Status execute_without_low_cardinality_columns(FunctionContext* context, Block& block, const ColumnNumbers& arguments, size_t result, - size_t input_rows_count, bool dry_run); + size_t input_rows_count, bool dry_run) const; Status _execute_skipped_constant_deal(FunctionContext* context, Block& block, const ColumnNumbers& args, size_t result, - size_t input_rows_count, bool dry_run); + size_t input_rows_count, bool dry_run) const; }; /// Function with known arguments and return type. 
@@ -178,7 +174,7 @@ class IFunctionBase { /// TODO: make const virtual Status execute(FunctionContext* context, Block& block, const ColumnNumbers& arguments, - size_t result, size_t input_rows_count, bool dry_run = false) { + size_t result, size_t input_rows_count, bool dry_run = false) const { return prepare(context, block, arguments, result) ->execute(context, block, arguments, result, input_rows_count, dry_run); } @@ -511,7 +507,7 @@ class DefaultExecutable final : public PreparedFunctionImpl { } Status execute_impl_dry_run(FunctionContext* context, Block& block, const ColumnNumbers& arguments, size_t result, - size_t input_rows_count) final { + size_t input_rows_count) const final { return function->execute_impl_dry_run(context, block, arguments, result, input_rows_count); } bool use_default_implementation_for_nulls() const final { diff --git a/be/src/vec/functions/function_bitmap.cpp b/be/src/vec/functions/function_bitmap.cpp index ac80542f63349f0..08b15098755dd76 100644 --- a/be/src/vec/functions/function_bitmap.cpp +++ b/be/src/vec/functions/function_bitmap.cpp @@ -700,12 +700,7 @@ Status execute_bitmap_op_count_null_to_zero( size_t input_rows_count, const std::function& exec_impl_func) { - NullPresence null_presence = get_null_presence(block, arguments); - - if (null_presence.has_null_constant) { - block.get_by_position(result).column = - block.get_by_position(result).type->create_column_const(input_rows_count, 0); - } else if (null_presence.has_nullable) { + if (get_null_presence(block, arguments)) { auto [temporary_block, new_args, new_result] = create_block_with_nested_columns(block, arguments, result); RETURN_IF_ERROR(exec_impl_func(context, temporary_block, new_args, new_result, diff --git a/be/src/vec/functions/function_cast.h b/be/src/vec/functions/function_cast.h index 57678b02b6880c9..82e4b3d972bfa48 100644 --- a/be/src/vec/functions/function_cast.h +++ b/be/src/vec/functions/function_cast.h @@ -94,6 +94,7 @@ #include "vec/functions/function_helpers.h" #include "vec/io/reader_buffer.h" #include "vec/runtime/vdatetime_value.h" +#include "vec/utils/util.hpp" class DateLUTImpl; @@ -1454,12 +1455,7 @@ class PreparedFunctionCast : public PreparedFunctionImpl { protected: Status execute_impl(FunctionContext* context, Block& block, const ColumnNumbers& arguments, size_t result, size_t input_rows_count) const override { - /// drop second argument, pass others - ColumnNumbers new_arguments {arguments.front()}; - if (arguments.size() > 2) - new_arguments.insert(std::end(new_arguments), std::next(std::begin(arguments), 2), - std::end(arguments)); - return wrapper_function(context, block, new_arguments, result, input_rows_count); + return wrapper_function(context, block, arguments, result, input_rows_count); } bool use_default_implementation_for_nulls() const override { return false; } @@ -1547,7 +1543,7 @@ struct ConvertThroughParsing { res == StringParser::PARSE_OVERFLOW || res == StringParser::PARSE_UNDERFLOW); } else if constexpr (IsDataTypeDateTimeV2) { - auto type = check_and_get_data_type( + const auto* type = assert_cast( block.get_by_position(result).type.get()); parsed = try_parse_impl(vec_to[i], read_buffer, context->state()->timezone_obj(), @@ -2251,7 +2247,7 @@ class FunctionCast final : public IFunctionBase { const auto& from_nested = from_type; const auto& to_nested = to_type; - if (from_type->only_null() || from_type->is_null_literal()) { + if (from_type->is_null_literal()) { if (!to_nested->is_nullable()) { return create_unsupport_wrapper("Cannot convert NULL to a 
non-nullable type"); } @@ -2276,82 +2272,32 @@ class FunctionCast final : public IFunctionBase { const DataTypePtr& to_type, bool skip_not_null_check) const { /// Determine whether pre-processing and/or post-processing must take place during conversion. - bool source_is_nullable = from_type->is_nullable(); bool result_is_nullable = to_type->is_nullable(); - auto wrapper = prepare_impl(context, remove_nullable(from_type), remove_nullable(to_type), - result_is_nullable); - if (result_is_nullable) { - return [wrapper, source_is_nullable](FunctionContext* context, Block& block, - const ColumnNumbers& arguments, - const size_t result, size_t input_rows_count) { - /// Create a temporary block on which to perform the operation. - auto& res = block.get_by_position(result); - const auto& ret_type = res.type; - const auto& nullable_type = static_cast(*ret_type); - const auto& nested_type = nullable_type.get_nested_type(); - - Block tmp_block; - size_t tmp_res_index = 0; - if (source_is_nullable) { - auto [t_block, tmp_args] = - create_block_with_nested_columns(block, arguments, true); - tmp_block = std::move(t_block); - tmp_res_index = tmp_block.columns(); - tmp_block.insert({nullptr, nested_type, ""}); - - /// Perform the requested conversion. - RETURN_IF_ERROR( - wrapper(context, tmp_block, {0}, tmp_res_index, input_rows_count)); - } else { - tmp_block = block; - - tmp_res_index = block.columns(); - tmp_block.insert({nullptr, nested_type, ""}); - - /// Perform the requested conversion. - RETURN_IF_ERROR(wrapper(context, tmp_block, arguments, tmp_res_index, - input_rows_count)); - } - - // Note: here we should return the nullable result column - const auto& tmp_res = tmp_block.get_by_position(tmp_res_index); - res.column = wrap_in_nullable(tmp_res.column, - Block({block.get_by_position(arguments[0]), tmp_res}), - {0}, 1, input_rows_count); - - return Status::OK(); - }; - } else if (source_is_nullable) { - /// Conversion from Nullable to non-Nullable. - - return [wrapper, skip_not_null_check](FunctionContext* context, Block& block, - const ColumnNumbers& arguments, - const size_t result, size_t input_rows_count) { - auto [tmp_block, tmp_args, tmp_res] = - create_block_with_nested_columns(block, arguments, result); - - /// Check that all values are not-NULL. - /// Check can be skipped in case if LowCardinality dictionary is transformed. - /// In that case, correctness will be checked beforehand. 
- if (!skip_not_null_check) { - const auto& col = block.get_by_position(arguments[0]).column; - const auto& nullable_col = assert_cast(*col); - const auto& null_map = nullable_col.get_null_map_data(); - - if (!memory_is_zero(null_map.data(), null_map.size())) { - return Status::RuntimeError( - "Cannot convert NULL value to non-Nullable type"); - } - } - - RETURN_IF_ERROR(wrapper(context, tmp_block, tmp_args, tmp_res, input_rows_count)); - block.get_by_position(result).column = tmp_block.get_by_position(tmp_res).column; + return [this, from_type, to_type](FunctionContext* context, Block& block, + const ColumnNumbers& arguments, const size_t result, + size_t input_rows_count) { + auto nested_result_index = block.columns(); + block.insert(block.get_by_position(result).get_nested()); + auto nested_source_index = block.columns(); + block.insert(block.get_by_position(arguments[0]).get_nested()); + + RETURN_IF_ERROR(prepare_impl(context, remove_nullable(from_type), + remove_nullable(to_type), + true)(context, block, {nested_source_index}, + nested_result_index, input_rows_count)); + + block.get_by_position(result).column = + wrap_in_nullable(block.get_by_position(nested_result_index).column, block, + arguments, result, input_rows_count); + + block.erase(nested_source_index); + block.erase(nested_result_index); return Status::OK(); }; } else { - return wrapper; + return prepare_impl(context, from_type, to_type, false); } } @@ -2359,11 +2305,11 @@ class FunctionCast final : public IFunctionBase { /// 'requested_result_is_nullable' is true if CAST to Nullable type is requested. WrapperType prepare_impl(FunctionContext* context, const DataTypePtr& from_type, const DataTypePtr& to_type, bool requested_result_is_nullable) const { - if (from_type->equals(*to_type)) + if (from_type->equals(*to_type)) { return create_identity_wrapper(from_type); - else if (WhichDataType(from_type).is_nothing()) - return create_nothing_wrapper(to_type.get()); + } + // variant needs to be judged first if (to_type->get_type_id() == TypeIndex::VARIANT) { return create_variant_wrapper(from_type, static_cast(*to_type)); } @@ -2371,15 +2317,14 @@ class FunctionCast final : public IFunctionBase { return create_variant_wrapper(static_cast(*from_type), to_type); } - if (from_type->get_type_id() == TypeIndex::JSONB) { - bool jsonb_string_as_string = context ? context->jsonb_string_as_string() : false; + switch (from_type->get_type_id()) { + case TypeIndex::Nothing: + return create_nothing_wrapper(to_type.get()); + case TypeIndex::JSONB: return create_jsonb_wrapper(static_cast(*from_type), to_type, - jsonb_string_as_string); - } - if (to_type->get_type_id() == TypeIndex::JSONB) { - bool string_as_jsonb_string = context ? context->string_as_jsonb_string() : false; - return create_jsonb_wrapper(from_type, static_cast(*to_type), - string_as_jsonb_string); + context ? context->jsonb_string_as_string() : false); + default: + break; } WrapperType ret; @@ -2425,8 +2370,9 @@ class FunctionCast final : public IFunctionBase { return false; }; - if (call_on_index_and_data_type(to_type->get_type_id(), make_default_wrapper)) + if (call_on_index_and_data_type(to_type->get_type_id(), make_default_wrapper)) { return ret; + } switch (to_type->get_type_id()) { case TypeIndex::String: @@ -2446,6 +2392,9 @@ class FunctionCast final : public IFunctionBase { case TypeIndex::BitMap: return create_bitmap_wrapper(context, from_type, static_cast(*to_type)); + case TypeIndex::JSONB: + return create_jsonb_wrapper(from_type, static_cast(*to_type), + context ? 
context->string_as_jsonb_string() : false); default: break; } diff --git a/be/src/vec/functions/function_rpc.cpp b/be/src/vec/functions/function_rpc.cpp index 8b8b605188ed36d..a900436ffc5e9f0 100644 --- a/be/src/vec/functions/function_rpc.cpp +++ b/be/src/vec/functions/function_rpc.cpp @@ -101,7 +101,7 @@ Status FunctionRPC::open(FunctionContext* context, FunctionContext::FunctionStat } Status FunctionRPC::execute(FunctionContext* context, Block& block, const ColumnNumbers& arguments, - size_t result, size_t input_rows_count, bool dry_run) { + size_t result, size_t input_rows_count, bool dry_run) const { RPCFnImpl* fn = reinterpret_cast( context->get_function_state(FunctionContext::FRAGMENT_LOCAL)); return fn->vec_call(context, block, arguments, result, input_rows_count); diff --git a/be/src/vec/functions/function_rpc.h b/be/src/vec/functions/function_rpc.h index d10b9be546bbd71..eda4f6b00b1d78a 100644 --- a/be/src/vec/functions/function_rpc.h +++ b/be/src/vec/functions/function_rpc.h @@ -95,7 +95,7 @@ class FunctionRPC : public IFunctionBase { Status open(FunctionContext* context, FunctionContext::FunctionStateScope scope) override; Status execute(FunctionContext* context, Block& block, const ColumnNumbers& arguments, - size_t result, size_t input_rows_count, bool dry_run = false) override; + size_t result, size_t input_rows_count, bool dry_run = false) const override; bool is_deterministic() const override { return false; } diff --git a/be/src/vec/functions/function_string.h b/be/src/vec/functions/function_string.h index 2b1d5891a663fda..4363d040cd54d09 100644 --- a/be/src/vec/functions/function_string.h +++ b/be/src/vec/functions/function_string.h @@ -17,7 +17,6 @@ #pragma once -#include #include #include #include @@ -52,6 +51,7 @@ #include "vec/common/memcpy_small.h" #include "vec/common/pod_array.h" #include "vec/common/pod_array_fwd.h" +#include "vec/common/string_utils/string_utils.h" #include "vec/common/typeid_cast.h" #include "vec/core/block.h" #include "vec/core/column_numbers.h" @@ -120,6 +120,14 @@ struct StringOP { chars.insert(string_value.data(), string_value.data() + string_value.size()); offsets[index] = chars.size(); } + + static void push_value_string_reserved_and_allow_overflow(const std::string_view& string_value, + int index, ColumnString::Chars& chars, + ColumnString::Offsets& offsets) { + chars.insert_assume_reserved_and_allow_overflow(string_value.data(), + string_value.data() + string_value.size()); + offsets[index] = chars.size(); + } }; struct SubstringUtil { @@ -147,30 +155,37 @@ struct SubstringUtil { check_set_nullable(argument_columns[i], null_map, col_const[i]); } - auto specific_str_column = assert_cast(argument_columns[0].get()); - auto specific_start_column = + const auto* specific_str_column = + assert_cast(argument_columns[0].get()); + const auto* specific_start_column = assert_cast*>(argument_columns[1].get()); - auto specific_len_column = + const auto* specific_len_column = assert_cast*>(argument_columns[2].get()); - if (col_const[1] && col_const[2]) { - vectors(specific_str_column->get_chars(), specific_str_column->get_offsets(), - specific_start_column->get_data(), specific_len_column->get_data(), - null_map->get_data(), res->get_chars(), res->get_offsets()); - } else { - vectors(specific_str_column->get_chars(), specific_str_column->get_offsets(), - specific_start_column->get_data(), specific_len_column->get_data(), - null_map->get_data(), res->get_chars(), res->get_offsets()); + + auto vectors = vectors_utf8; + bool is_ascii = 
simd::VStringFunctions::is_ascii( + {specific_str_column->get_chars().data(), specific_str_column->get_chars().size()}); + if (col_const[1] && col_const[2] && is_ascii) { + vectors = vectors_ascii; + } else if (col_const[1] && col_const[2]) { + vectors = vectors_utf8; + } else if (is_ascii) { + vectors = vectors_ascii; } + vectors(specific_str_column->get_chars(), specific_str_column->get_offsets(), + specific_start_column->get_data(), specific_len_column->get_data(), + null_map->get_data(), res->get_chars(), res->get_offsets()); + block.get_by_position(result).column = ColumnNullable::create(std::move(res), std::move(null_map)); } private: - template - static void vectors(const ColumnString::Chars& chars, const ColumnString::Offsets& offsets, - const PaddedPODArray& start, const PaddedPODArray& len, - NullMap& null_map, ColumnString::Chars& res_chars, - ColumnString::Offsets& res_offsets) { + template + static void vectors_utf8(const ColumnString::Chars& chars, const ColumnString::Offsets& offsets, + const PaddedPODArray& start, const PaddedPODArray& len, + NullMap& null_map, ColumnString::Chars& res_chars, + ColumnString::Offsets& res_offsets) { size_t size = offsets.size(); res_offsets.resize(size); res_chars.reserve(chars.size()); @@ -179,119 +194,104 @@ struct SubstringUtil { PMR::monotonic_buffer_resource pool {buf.data(), buf.size()}; PMR::vector index {&pool}; - auto* __restrict data_ptr = chars.data(); - auto* __restrict offset_ptr = offsets.data(); - - if constexpr (Const) { - const auto start_value = start[0]; - const auto len_value = len[0]; - if (start_value == 0 || len_value <= 0) { + if constexpr (is_const) { + if (start[0] == 0 || len[0] <= 0) { for (size_t i = 0; i < size; ++i) { StringOP::push_empty_string(i, res_chars, res_offsets); } - } else { - for (size_t i = 0; i < size; ++i) { - const int str_size = offset_ptr[i] - offset_ptr[i - 1]; - const uint8_t* raw_str = data_ptr + offset_ptr[i - 1]; - // return empty string if start > src.length - if (start_value > str_size || start_value < -str_size || str_size == 0) { - StringOP::push_empty_string(i, res_chars, res_offsets); - continue; - } - // reference to string_function.cpp: substring - size_t byte_pos = 0; - index.clear(); - for (size_t j = 0, char_size = 0; - j < str_size && - (start_value <= 0 || index.size() <= start_value + len_value); - j += char_size) { - char_size = UTF8_BYTE_LENGTH[(unsigned char)(raw_str)[j]]; - index.push_back(j); - } + return; + } + } - int fixed_pos = start_value; - if (fixed_pos < 0) { - fixed_pos = str_size + fixed_pos + 1; - } else if (fixed_pos > index.size()) { - StringOP::push_null_string(i, res_chars, res_offsets, null_map); - continue; - } + for (size_t i = 0; i < size; ++i) { + int str_size = offsets[i] - offsets[i - 1]; + const char* str_data = (char*)chars.data() + offsets[i - 1]; + int start_value = is_const ? start[0] : start[i]; + int len_value = is_const ? 
len[0] : len[i]; - byte_pos = index[fixed_pos - 1]; - int fixed_len = str_size - byte_pos; - if (fixed_pos + len_value <= index.size()) { - fixed_len = index[fixed_pos + len_value - 1] - byte_pos; - } + // return empty string if start > src.length + if (start_value > str_size || str_size == 0 || start_value == 0 || len_value <= 0) { + StringOP::push_empty_string(i, res_chars, res_offsets); + continue; + } - if (byte_pos <= str_size && fixed_len > 0) { - // return StringRef(str.data + byte_pos, fixed_len); - StringOP::push_value_string( - std::string_view {reinterpret_cast(raw_str + byte_pos), - (size_t)fixed_len}, - i, res_chars, res_offsets); - } else { - StringOP::push_empty_string(i, res_chars, res_offsets); - } + size_t byte_pos = 0; + index.clear(); + for (size_t j = 0, char_size = 0; j < str_size; j += char_size) { + char_size = get_utf8_byte_length(str_data[j]); + index.push_back(j); + if (start_value > 0 && index.size() > start_value + len_value) { + break; } } - } else { - PMR::vector> strs(&pool); - strs.resize(size); - for (int i = 0; i < size; ++i) { - strs[i].first = data_ptr + offset_ptr[i - 1]; - strs[i].second = offset_ptr[i] - offset_ptr[i - 1]; + + int fixed_pos = start_value; + if (fixed_pos < -(int)index.size()) { + StringOP::push_empty_string(i, res_chars, res_offsets); + continue; + } + if (fixed_pos < 0) { + fixed_pos = index.size() + fixed_pos + 1; + } + if (fixed_pos > index.size()) { + StringOP::push_null_string(i, res_chars, res_offsets, null_map); + continue; } - for (size_t i = 0; i < size; ++i) { - auto [raw_str, str_size] = strs[i]; - const auto& start_value = start[i]; - const auto& len_value = len[i]; + byte_pos = index[fixed_pos - 1]; + size_t fixed_len = str_size - byte_pos; + if (fixed_pos + len_value <= index.size()) { + fixed_len = index[fixed_pos + len_value - 1] - byte_pos; + } - // return empty string if start > src.length - if (start_value > str_size || str_size == 0 || start_value == 0 || len_value <= 0) { - StringOP::push_empty_string(i, res_chars, res_offsets); - continue; - } - // reference to string_function.cpp: substring - size_t byte_pos = 0; - index.clear(); - for (size_t j = 0, char_size = 0; j < str_size; j += char_size) { - char_size = UTF8_BYTE_LENGTH[(unsigned char)(raw_str)[j]]; - index.push_back(j); - if (start_value > 0 && index.size() > start_value + len_value) { - break; - } - } + if (byte_pos <= str_size && fixed_len > 0) { + StringOP::push_value_string_reserved_and_allow_overflow( + {str_data + byte_pos, fixed_len}, i, res_chars, res_offsets); + } else { + StringOP::push_empty_string(i, res_chars, res_offsets); + } + } + } - int fixed_pos = start_value; - if (fixed_pos < -(int)index.size()) { + template + static void vectors_ascii(const ColumnString::Chars& chars, + const ColumnString::Offsets& offsets, + const PaddedPODArray& start, const PaddedPODArray& len, + NullMap& null_map, ColumnString::Chars& res_chars, + ColumnString::Offsets& res_offsets) { + size_t size = offsets.size(); + res_offsets.resize(size); + + if constexpr (is_const) { + if (start[0] == 0 || len[0] <= 0) { + for (size_t i = 0; i < size; ++i) { StringOP::push_empty_string(i, res_chars, res_offsets); - continue; - } - if (fixed_pos < 0) { - fixed_pos = index.size() + fixed_pos + 1; - } - if (fixed_pos > index.size()) { - StringOP::push_null_string(i, res_chars, res_offsets, null_map); - continue; } + return; + } + res_chars.reserve(std::min(chars.size(), len[0] * size)); + } else { + res_chars.reserve(chars.size()); + } - byte_pos = index[fixed_pos - 1]; - 
int fixed_len = str_size - byte_pos; - if (fixed_pos + len_value <= index.size()) { - fixed_len = index[fixed_pos + len_value - 1] - byte_pos; - } + for (size_t i = 0; i < size; ++i) { + int str_size = offsets[i] - offsets[i - 1]; + const char* str_data = (char*)chars.data() + offsets[i - 1]; - if (byte_pos <= str_size && fixed_len > 0) { - // return StringRef(str.data + byte_pos, fixed_len); - StringOP::push_value_string( - std::string_view {reinterpret_cast(raw_str + byte_pos), - (size_t)fixed_len}, - i, res_chars, res_offsets); - } else { - StringOP::push_empty_string(i, res_chars, res_offsets); - } + int start_value = is_const ? start[0] : start[i]; + int len_value = is_const ? len[0] : len[i]; + + if (start_value > str_size || start_value < -str_size || str_size == 0) { + StringOP::push_empty_string(i, res_chars, res_offsets); + continue; + } + int fixed_pos = start_value - 1; + if (fixed_pos < 0) { + fixed_pos = str_size + fixed_pos + 1; } + size_t fixed_len = std::min(str_size - fixed_pos, len_value); + StringOP::push_value_string_reserved_and_allow_overflow( + {str_data + fixed_pos, fixed_len}, i, res_chars, res_offsets); } } }; diff --git a/be/src/vec/functions/functions_geo.cpp b/be/src/vec/functions/functions_geo.cpp index 34bec39d4ffbeed..ac6969c582d1672 100644 --- a/be/src/vec/functions/functions_geo.cpp +++ b/be/src/vec/functions/functions_geo.cpp @@ -132,7 +132,7 @@ struct StX { auto pt = point.decode_from(point_value.data, point_value.size); if (!pt) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } auto x_value = point.x(); @@ -164,7 +164,7 @@ struct StY { auto pt = point.decode_from(point_value.data, point_value.size); if (!pt) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } auto y_value = point.y(); @@ -199,7 +199,7 @@ struct StDistanceSphere { x_lat->operator[](row).get(), y_lng->operator[](row).get(), y_lat->operator[](row).get(), &distance)) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } res->insert_data(const_cast((char*)&distance), 0); @@ -233,7 +233,7 @@ struct StAngleSphere { x_lat->operator[](row).get(), y_lng->operator[](row).get(), y_lat->operator[](row).get(), &angle)) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } res->insert_data(const_cast((char*)&angle), 0); @@ -266,26 +266,26 @@ struct StAngle { auto shape_value1 = p1->get_data_at(row); auto pt1 = point1.decode_from(shape_value1.data, shape_value1.size); if (!pt1) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } auto shape_value2 = p2->get_data_at(row); auto pt2 = point2.decode_from(shape_value2.data, shape_value2.size); if (!pt2) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } auto shape_value3 = p3->get_data_at(row); auto pt3 = point3.decode_from(shape_value3.data, shape_value3.size); if (!pt3) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } double angle = 0; if (!GeoPoint::ComputeAngle(&point1, &point2, &point3, &angle)) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } res->insert_data(const_cast((char*)&angle), 0); @@ -315,20 +315,20 @@ struct StAzimuth { auto shape_value1 = p1->get_data_at(row); auto pt1 = point1.decode_from(shape_value1.data, shape_value1.size); if (!pt1) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } auto shape_value2 = p2->get_data_at(row); auto pt2 = point2.decode_from(shape_value2.data, shape_value2.size); if (!pt2) { - res->insert_data(nullptr, 0); + res->insert_default(); 
continue; } double angle = 0; if (!GeoPoint::ComputeAzimuth(&point1, &point2, &angle)) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } res->insert_data(const_cast((char*)&angle), 0); @@ -356,13 +356,13 @@ struct StAreaSquareMeters { auto shape_value = col->get_data_at(row); shape = GeoShape::from_encoded(shape_value.data, shape_value.size); if (!shape) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } double area = 0; if (!GeoShape::ComputeArea(shape.get(), &area, "square_meters")) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } res->insert_data(const_cast((char*)&area), 0); @@ -391,13 +391,13 @@ struct StAreaSquareKm { auto shape_value = col->get_data_at(row); shape = GeoShape::from_encoded(shape_value.data, shape_value.size); if (!shape) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } double area = 0; if (!GeoShape::ComputeArea(shape.get(), &area, "square_km")) { - res->insert_data(nullptr, 0); + res->insert_default(); continue; } res->insert_data(const_cast((char*)&area), 0); @@ -479,7 +479,7 @@ struct StContains { shapes[i] = std::shared_ptr( GeoShape::from_encoded(strs[i]->data, strs[i]->size)); if (shapes[i] == nullptr) { - res->insert_data(nullptr, 0); + res->insert_default(); break; } } diff --git a/be/src/vec/functions/functions_geo.h b/be/src/vec/functions/functions_geo.h index 11d2cd7f881b97e..9c4db09e14967f9 100644 --- a/be/src/vec/functions/functions_geo.h +++ b/be/src/vec/functions/functions_geo.h @@ -68,7 +68,6 @@ class GeoFunction : public IFunction { DataTypePtr get_return_type_impl(const DataTypes& arguments) const override { return make_nullable(std::make_shared()); } - bool use_default_implementation_for_nulls() const override { return true; } Status execute_impl(FunctionContext* context, Block& block, const ColumnNumbers& arguments, size_t result, size_t input_rows_count) const override { diff --git a/be/src/vec/functions/functions_logical.cpp b/be/src/vec/functions/functions_logical.cpp index 1eceeac3de4aa75..c0b8d62ec259ba4 100644 --- a/be/src/vec/functions/functions_logical.cpp +++ b/be/src/vec/functions/functions_logical.cpp @@ -187,9 +187,8 @@ DataTypePtr FunctionAnyArityLogical::get_return_type_impl( } } - if (!(is_native_number(arg_type) || - (Impl::special_implementation_for_nulls() && - (arg_type->only_null() || is_native_number(remove_nullable(arg_type)))))) { + if (!(is_native_number(arg_type) || (Impl::special_implementation_for_nulls() && + is_native_number(remove_nullable(arg_type))))) { LOG(FATAL) << fmt::format("Illegal type ({}) of {} argument of function {}", arg_type->get_name(), i + 1, get_name()); } diff --git a/be/src/vec/functions/least_greast.cpp b/be/src/vec/functions/least_greast.cpp index 9d8cd318b499cfe..06f14ec669ee806 100644 --- a/be/src/vec/functions/least_greast.cpp +++ b/be/src/vec/functions/least_greast.cpp @@ -190,8 +190,7 @@ struct FunctionFieldImpl { for (int row = 0; row < input_rows_count; ++row) { const auto& str_data = column_string.get_data_at(index_check_const(row, arg_const)); for (int col = 1; col < column_size; ++col) { - auto [column, is_const] = - unpack_if_const(block.safe_get_by_position(col).column); + auto [column, is_const] = unpack_if_const(argument_columns[col]); const auto& temp_data = assert_cast(*column).get_data_at( index_check_const(row, is_const)); if (EqualsOp::apply(temp_data, str_data)) { diff --git a/be/src/vec/functions/nullif.cpp b/be/src/vec/functions/nullif.cpp index 2fccee27d4d3126..315ca52d1bc423c 100644 --- 
a/be/src/vec/functions/nullif.cpp +++ b/be/src/vec/functions/nullif.cpp @@ -50,11 +50,6 @@ class FunctionContext; namespace doris::vectorized { class FunctionNullIf : public IFunction { public: - struct NullPresence { - bool has_nullable = false; - bool has_null_constant = false; - }; - static constexpr auto name = "nullif"; static FunctionPtr create() { return std::make_shared(); } @@ -69,33 +64,18 @@ class FunctionNullIf : public IFunction { return make_nullable(arguments[0]); } - NullPresence get_null_resense(const ColumnsWithTypeAndName& args) const { - NullPresence res; - - for (const auto& elem : args) { - if (!res.has_nullable) res.has_nullable = elem.type->is_nullable(); - if (!res.has_null_constant) res.has_null_constant = elem.type->only_null(); - } - - return res; - } - - DataTypePtr get_return_type_for_equal(const ColumnsWithTypeAndName& arguments) const { + static DataTypePtr get_return_type_for_equal(const ColumnsWithTypeAndName& arguments) { ColumnsWithTypeAndName args_without_low_cardinality(arguments); for (ColumnWithTypeAndName& arg : args_without_low_cardinality) { bool is_const = arg.column && is_column_const(*arg.column); - if (is_const) + if (is_const) { arg.column = assert_cast(*arg.column).remove_low_cardinality(); + } } if (!arguments.empty()) { - NullPresence null_presence = get_null_resense(arguments); - - if (null_presence.has_null_constant) { - return make_nullable(std::make_shared()); - } - if (null_presence.has_nullable) { + if (get_null_presence(arguments)) { return make_nullable(std::make_shared()); } } diff --git a/be/src/vec/runtime/vdata_stream_mgr.cpp b/be/src/vec/runtime/vdata_stream_mgr.cpp index 425e92e92e125b6..1210cd811d1d222 100644 --- a/be/src/vec/runtime/vdata_stream_mgr.cpp +++ b/be/src/vec/runtime/vdata_stream_mgr.cpp @@ -57,14 +57,13 @@ inline uint32_t VDataStreamMgr::get_hash_value(const TUniqueId& fragment_instanc std::shared_ptr VDataStreamMgr::create_recvr( RuntimeState* state, const RowDescriptor& row_desc, const TUniqueId& fragment_instance_id, - PlanNodeId dest_node_id, int num_senders, RuntimeProfile* profile, bool is_merging, - std::shared_ptr sub_plan_query_statistics_recvr) { + PlanNodeId dest_node_id, int num_senders, RuntimeProfile* profile, bool is_merging) { DCHECK(profile != nullptr); VLOG_FILE << "creating receiver for fragment=" << print_id(fragment_instance_id) << ", node=" << dest_node_id; - std::shared_ptr recvr(new VDataStreamRecvr( - this, state, row_desc, fragment_instance_id, dest_node_id, num_senders, is_merging, - profile, sub_plan_query_statistics_recvr)); + std::shared_ptr recvr(new VDataStreamRecvr(this, state, row_desc, + fragment_instance_id, dest_node_id, + num_senders, is_merging, profile)); uint32_t hash_value = get_hash_value(fragment_instance_id, dest_node_id); std::lock_guard l(_lock); _fragment_stream_set.insert(std::make_pair(fragment_instance_id, dest_node_id)); @@ -127,12 +126,6 @@ Status VDataStreamMgr::transmit_block(const PTransmitDataParams* request, // then the upstream node may report error status to FE, the query is failed. return Status::EndOfFile("data stream receiver is deconstructed"); } - // request can only be used before calling recvr's add_batch or when request - // is the last for the sender, because request maybe released after it's batch - // is consumed by ExchangeNode. 
- if (request->has_query_statistics()) { - recvr->add_sub_plan_statistics(request->query_statistics(), request->sender_id()); - } bool eos = request->eos(); if (request->has_block()) { diff --git a/be/src/vec/runtime/vdata_stream_mgr.h b/be/src/vec/runtime/vdata_stream_mgr.h index d809ff96fbd6e9e..853d984621156ce 100644 --- a/be/src/vec/runtime/vdata_stream_mgr.h +++ b/be/src/vec/runtime/vdata_stream_mgr.h @@ -39,7 +39,6 @@ namespace doris { class RuntimeState; class RowDescriptor; class RuntimeProfile; -class QueryStatisticsRecvr; class PTransmitDataParams; namespace vectorized { @@ -50,11 +49,11 @@ class VDataStreamMgr { VDataStreamMgr(); ~VDataStreamMgr(); - std::shared_ptr create_recvr( - RuntimeState* state, const RowDescriptor& row_desc, - const TUniqueId& fragment_instance_id, PlanNodeId dest_node_id, int num_senders, - RuntimeProfile* profile, bool is_merging, - std::shared_ptr sub_plan_query_statistics_recvr); + std::shared_ptr create_recvr(RuntimeState* state, + const RowDescriptor& row_desc, + const TUniqueId& fragment_instance_id, + PlanNodeId dest_node_id, int num_senders, + RuntimeProfile* profile, bool is_merging); std::shared_ptr find_recvr(const TUniqueId& fragment_instance_id, PlanNodeId node_id, bool acquire_lock = true); diff --git a/be/src/vec/runtime/vdata_stream_recvr.cpp b/be/src/vec/runtime/vdata_stream_recvr.cpp index b75bd4c21bfa458..dfc574591b3c570 100644 --- a/be/src/vec/runtime/vdata_stream_recvr.cpp +++ b/be/src/vec/runtime/vdata_stream_recvr.cpp @@ -336,11 +336,10 @@ void VDataStreamRecvr::SenderQueue::close() { _block_queue.clear(); } -VDataStreamRecvr::VDataStreamRecvr( - VDataStreamMgr* stream_mgr, RuntimeState* state, const RowDescriptor& row_desc, - const TUniqueId& fragment_instance_id, PlanNodeId dest_node_id, int num_senders, - bool is_merging, RuntimeProfile* profile, - std::shared_ptr sub_plan_query_statistics_recvr) +VDataStreamRecvr::VDataStreamRecvr(VDataStreamMgr* stream_mgr, RuntimeState* state, + const RowDescriptor& row_desc, + const TUniqueId& fragment_instance_id, PlanNodeId dest_node_id, + int num_senders, bool is_merging, RuntimeProfile* profile) : HasTaskExecutionCtx(state), _mgr(stream_mgr), #ifdef USE_MEM_TRACKER @@ -354,7 +353,6 @@ VDataStreamRecvr::VDataStreamRecvr( _is_closed(false), _profile(profile), _peak_memory_usage_counter(nullptr), - _sub_plan_query_statistics_recvr(sub_plan_query_statistics_recvr), _enable_pipeline(state->enable_pipeline_exec()), _mem_available(std::make_shared(true)) { // DataStreamRecvr may be destructed after the instance execution thread ends. @@ -483,17 +481,6 @@ void VDataStreamRecvr::remove_sender(int sender_id, int be_number, Status exec_s _sender_queues[use_sender_id]->decrement_senders(be_number); } -void VDataStreamRecvr::remove_sender(int sender_id, int be_number, QueryStatisticsPtr statistics, - Status exec_status) { - if (!exec_status.ok()) { - cancel_stream(exec_status); - return; - } - int use_sender_id = _is_merging ? 
sender_id : 0; - _sender_queues[use_sender_id]->decrement_senders(be_number); - _sub_plan_query_statistics_recvr->insert(statistics, sender_id); -} - void VDataStreamRecvr::cancel_stream(Status exec_status) { VLOG_QUERY << "cancel_stream: fragment_instance_id=" << print_id(_fragment_instance_id) << exec_status; diff --git a/be/src/vec/runtime/vdata_stream_recvr.h b/be/src/vec/runtime/vdata_stream_recvr.h index 7f9436cba5a5f50..141a5c54b64a594 100644 --- a/be/src/vec/runtime/vdata_stream_recvr.h +++ b/be/src/vec/runtime/vdata_stream_recvr.h @@ -56,7 +56,6 @@ namespace doris { class MemTracker; class PBlock; class MemTrackerLimiter; -class PQueryStatistics; class RuntimeState; namespace pipeline { @@ -76,8 +75,7 @@ class VDataStreamRecvr : public HasTaskExecutionCtx { class SenderQueue; VDataStreamRecvr(VDataStreamMgr* stream_mgr, RuntimeState* state, const RowDescriptor& row_desc, const TUniqueId& fragment_instance_id, PlanNodeId dest_node_id, - int num_senders, bool is_merging, RuntimeProfile* profile, - std::shared_ptr sub_plan_query_statistics_recvr); + int num_senders, bool is_merging, RuntimeProfile* profile); virtual ~VDataStreamRecvr(); @@ -103,17 +101,10 @@ class VDataStreamRecvr : public HasTaskExecutionCtx { PlanNodeId dest_node_id() const { return _dest_node_id; } const RowDescriptor& row_desc() const { return _row_desc; } - void add_sub_plan_statistics(const PQueryStatistics& statistics, int sender_id) { - _sub_plan_query_statistics_recvr->insert(statistics, sender_id); - } - // Indicate that a particular sender is done. Delegated to the appropriate // sender queue. Called from DataStreamMgr. void remove_sender(int sender_id, int be_number, Status exec_status); - void remove_sender(int sender_id, int be_number, QueryStatisticsPtr statistics, - Status exec_status); - void cancel_stream(Status exec_status); void close(); @@ -184,8 +175,6 @@ class VDataStreamRecvr : public HasTaskExecutionCtx { // Number of blocks received RuntimeProfile::Counter* _blocks_produced_counter = nullptr; - std::shared_ptr _sub_plan_query_statistics_recvr; - bool _enable_pipeline; std::vector> _sender_to_local_channel_dependency; @@ -213,7 +202,7 @@ class VDataStreamRecvr::SenderQueue { _local_channel_dependency = local_channel_dependency; } - virtual bool should_wait(); + bool should_wait(); virtual Status get_batch(Block* next_block, bool* eos); diff --git a/be/src/vec/sink/load_stream_stub.cpp b/be/src/vec/sink/load_stream_stub.cpp index 5ef4e02f66a127c..d17090f8aa30f05 100644 --- a/be/src/vec/sink/load_stream_stub.cpp +++ b/be/src/vec/sink/load_stream_stub.cpp @@ -151,7 +151,7 @@ Status LoadStreamStub::open(std::shared_ptr self, const NodeInfo& node_info, int64_t txn_id, const OlapTableSchemaParam& schema, const std::vector& tablets_for_schema, int total_streams, - bool enable_profile) { + int64_t idle_timeout_ms, bool enable_profile) { std::unique_lock lock(_open_mutex); if (_is_init.load()) { return Status::OK(); @@ -160,7 +160,7 @@ Status LoadStreamStub::open(std::shared_ptr self, std::string host_port = get_host_port(node_info.host, node_info.brpc_port); brpc::StreamOptions opt; opt.max_buf_size = config::load_stream_max_buf_size; - opt.idle_timeout_ms = config::load_stream_idle_timeout_ms; + opt.idle_timeout_ms = idle_timeout_ms; opt.messages_in_batch = config::load_stream_messages_in_batch; opt.handler = new LoadStreamReplyHandler(_load_id, _dst_id, self); brpc::Controller cntl; @@ -174,6 +174,7 @@ Status LoadStreamStub::open(std::shared_ptr self, request.set_txn_id(txn_id); 
request.set_enable_profile(enable_profile); request.set_total_streams(total_streams); + request.set_idle_timeout_ms(idle_timeout_ms); schema.to_protobuf(request.mutable_schema()); for (auto& tablet : tablets_for_schema) { *request.add_tablets() = tablet; diff --git a/be/src/vec/sink/load_stream_stub.h b/be/src/vec/sink/load_stream_stub.h index 81ec99fa451abf6..6aae778dc93551c 100644 --- a/be/src/vec/sink/load_stream_stub.h +++ b/be/src/vec/sink/load_stream_stub.h @@ -125,7 +125,7 @@ class LoadStreamStub { BrpcClientCache* client_cache, const NodeInfo& node_info, int64_t txn_id, const OlapTableSchemaParam& schema, const std::vector& tablets_for_schema, int total_streams, - bool enable_profile); + int64_t idle_timeout_ms, bool enable_profile); // for mock this class in UT #ifdef BE_TEST diff --git a/be/src/vec/sink/vdata_stream_sender.cpp b/be/src/vec/sink/vdata_stream_sender.cpp index 53c140ef5a8034b..6c4d10839e0a035 100644 --- a/be/src/vec/sink/vdata_stream_sender.cpp +++ b/be/src/vec/sink/vdata_stream_sender.cpp @@ -152,13 +152,7 @@ Status Channel::send_local_block(Status exec_status, bool eos) { _local_recvr->add_block(&block, _parent->sender_id(), true); if (eos) { - /// TODO: Supported on pipelineX, we can hold QueryStatistics on the fragment instead of on instances. - if constexpr (std::is_same_v) { - _local_recvr->remove_sender(_parent->sender_id(), _be_number, - _parent->query_statisticsPtr(), exec_status); - } else { - _local_recvr->remove_sender(_parent->sender_id(), _be_number, exec_status); - } + _local_recvr->remove_sender(_parent->sender_id(), _be_number, exec_status); } return Status::OK(); } else { @@ -199,10 +193,6 @@ Status Channel::send_remote_block(PBlock* block, bool eos, Status exec_s VLOG_ROW << "Channel::send_batch() instance_id=" << print_id(_fragment_instance_id) << " dest_node=" << _dest_node_id << " to_host=" << _brpc_dest_addr.hostname << " _packet_seq=" << _packet_seq << " row_desc=" << _row_desc.debug_string(); - if (_is_transfer_chain && (_send_query_statistics_with_every_batch || eos)) { - auto statistic = _brpc_request->mutable_query_statistics(); - _parent->query_statistics()->to_pb(statistic); - } _brpc_request->set_eos(eos); if (!exec_status.ok()) { @@ -289,12 +279,7 @@ Status Channel::close_internal(Status exec_status) { SCOPED_CONSUME_MEM_TRACKER(_parent->mem_tracker()); if (is_local()) { if (_recvr_is_valid()) { - if constexpr (std::is_same_v) { - _local_recvr->remove_sender(_parent->sender_id(), _be_number, - _parent->query_statisticsPtr(), exec_status); - } else { - _local_recvr->remove_sender(_parent->sender_id(), _be_number, exec_status); - } + _local_recvr->remove_sender(_parent->sender_id(), _be_number, exec_status); } } else { status = send_remote_block((PBlock*)nullptr, true, exec_status); @@ -329,8 +314,7 @@ void Channel::ch_roll_pb_block() { VDataStreamSender::VDataStreamSender(RuntimeState* state, ObjectPool* pool, int sender_id, const RowDescriptor& row_desc, const TDataStreamSink& sink, - const std::vector& destinations, - bool send_query_statistics_with_every_batch) + const std::vector& destinations) : DataSink(row_desc), _sender_id(sender_id), _state(state), @@ -351,21 +335,17 @@ VDataStreamSender::VDataStreamSender(RuntimeState* state, ObjectPool* pool, int _enable_pipeline_exec = state->enable_pipeline_exec(); for (int i = 0; i < destinations.size(); ++i) { - // Select first dest as transfer chain. 
- bool is_transfer_chain = (i == 0); const auto& fragment_instance_id = destinations[i].fragment_instance_id; if (fragment_id_to_channel_index.find(fragment_instance_id.lo) == fragment_id_to_channel_index.end()) { if (_enable_pipeline_exec) { _channel_shared_ptrs.emplace_back(new PipChannel( this, row_desc, destinations[i].brpc_server, fragment_instance_id, - sink.dest_node_id, is_transfer_chain, - send_query_statistics_with_every_batch)); + sink.dest_node_id)); } else { _channel_shared_ptrs.emplace_back( new Channel(this, row_desc, destinations[i].brpc_server, - fragment_instance_id, sink.dest_node_id, is_transfer_chain, - send_query_statistics_with_every_batch)); + fragment_instance_id, sink.dest_node_id)); } fragment_id_to_channel_index.emplace(fragment_instance_id.lo, _channel_shared_ptrs.size() - 1); @@ -388,8 +368,7 @@ VDataStreamSender::VDataStreamSender(RuntimeState* state, ObjectPool* pool, int VDataStreamSender::VDataStreamSender(RuntimeState* state, ObjectPool* pool, int sender_id, const RowDescriptor& row_desc, PlanNodeId dest_node_id, - const std::vector& destinations, - bool send_query_statistics_with_every_batch) + const std::vector& destinations) : DataSink(row_desc), _sender_id(sender_id), _state(state), @@ -405,9 +384,9 @@ VDataStreamSender::VDataStreamSender(RuntimeState* state, ObjectPool* pool, int const auto& fragment_instance_id = destinations[i].fragment_instance_id; if (fragment_id_to_channel_index.find(fragment_instance_id.lo) == fragment_id_to_channel_index.end()) { - _channel_shared_ptrs.emplace_back( - new Channel(this, row_desc, destinations[i].brpc_server, fragment_instance_id, - _dest_node_id, false, send_query_statistics_with_every_batch)); + _channel_shared_ptrs.emplace_back(new Channel(this, row_desc, + destinations[i].brpc_server, + fragment_instance_id, _dest_node_id)); } fragment_id_to_channel_index.emplace(fragment_instance_id.lo, _channel_shared_ptrs.size() - 1); diff --git a/be/src/vec/sink/vdata_stream_sender.h b/be/src/vec/sink/vdata_stream_sender.h index f59dad266f87abd..ca020d9bab8f972 100644 --- a/be/src/vec/sink/vdata_stream_sender.h +++ b/be/src/vec/sink/vdata_stream_sender.h @@ -108,13 +108,11 @@ class VDataStreamSender : public DataSink { friend class pipeline::ExchangeSinkOperator; VDataStreamSender(RuntimeState* state, ObjectPool* pool, int sender_id, const RowDescriptor& row_desc, const TDataStreamSink& sink, - const std::vector& destinations, - bool send_query_statistics_with_every_batch); + const std::vector& destinations); VDataStreamSender(RuntimeState* state, ObjectPool* pool, int sender_id, const RowDescriptor& row_desc, PlanNodeId dest_node_id, - const std::vector& destinations, - bool send_query_statistics_with_every_batch); + const std::vector& destinations); ~VDataStreamSender() override; @@ -145,8 +143,6 @@ class VDataStreamSender : public DataSink { return _split_block_distribute_by_channel_timer; } MemTracker* mem_tracker() { return _mem_tracker.get(); } - QueryStatistics* query_statistics() { return _query_statistics.get(); } - QueryStatisticsPtr query_statisticsPtr() { return _query_statistics; } bool transfer_large_data_by_brpc() { return _transfer_large_data_by_brpc; } RuntimeProfile::Counter* merge_block_timer() { return _merge_block_timer; } segment_v2::CompressionTypePB compression_type() const { return _compression_type; } @@ -237,8 +233,7 @@ class Channel { // how much tuple data is getting accumulated before being sent; it only applies // when data is added via add_row() and not sent directly via send_batch(). 
Channel(Parent* parent, const RowDescriptor& row_desc, const TNetworkAddress& brpc_dest, - const TUniqueId& fragment_instance_id, PlanNodeId dest_node_id, bool is_transfer_chain, - bool send_query_statistics_with_every_batch) + const TUniqueId& fragment_instance_id, PlanNodeId dest_node_id) : _parent(parent), _row_desc(row_desc), _fragment_instance_id(fragment_instance_id), @@ -248,8 +243,6 @@ class Channel { _need_close(false), _closed(false), _brpc_dest_addr(brpc_dest), - _is_transfer_chain(is_transfer_chain), - _send_query_statistics_with_every_batch(send_query_statistics_with_every_batch), _is_local((_brpc_dest_addr.hostname == BackendOptions::get_localhost()) && (_brpc_dest_addr.port == config::brpc_port)), _serializer(_parent, _is_local) { @@ -380,9 +373,6 @@ class Channel { std::shared_ptr> _send_remote_block_callback; Status _receiver_status; int32_t _brpc_timeout_ms = 500; - // whether the dest can be treated as query statistics transfer chain. - bool _is_transfer_chain; - bool _send_query_statistics_with_every_batch; RuntimeState* _state = nullptr; bool _is_local; @@ -442,10 +432,8 @@ template class PipChannel final : public Channel { public: PipChannel(Parent* parent, const RowDescriptor& row_desc, const TNetworkAddress& brpc_dest, - const TUniqueId& fragment_instance_id, PlanNodeId dest_node_id, - bool is_transfer_chain, bool send_query_statistics_with_every_batch) - : Channel(parent, row_desc, brpc_dest, fragment_instance_id, dest_node_id, - is_transfer_chain, send_query_statistics_with_every_batch) { + const TUniqueId& fragment_instance_id, PlanNodeId dest_node_id) + : Channel(parent, row_desc, brpc_dest, fragment_instance_id, dest_node_id) { ch_roll_pb_block(); } diff --git a/be/src/vec/sink/vresult_file_sink.cpp b/be/src/vec/sink/vresult_file_sink.cpp index 5bb0de4b50650ca..02d77fa6d429242 100644 --- a/be/src/vec/sink/vresult_file_sink.cpp +++ b/be/src/vec/sink/vresult_file_sink.cpp @@ -32,7 +32,6 @@ #include "vec/exprs/vexpr.h" namespace doris { -class QueryStatistics; class TExpr; } // namespace doris @@ -45,15 +44,13 @@ VResultFileSink::VResultFileSink(const RowDescriptor& row_desc, VResultFileSink::VResultFileSink(RuntimeState* state, ObjectPool* pool, int sender_id, const RowDescriptor& row_desc, const TResultFileSink& sink, const std::vector& destinations, - bool send_query_statistics_with_every_batch, const std::vector& t_output_expr, DescriptorTbl& descs) : AsyncWriterSink(row_desc, t_output_expr), _output_row_descriptor(descs.get_tuple_descriptor(sink.output_tuple_id), false) { _is_top_sink = false; CHECK_EQ(destinations.size(), 1); _stream_sender.reset(new VDataStreamSender(state, pool, sender_id, row_desc, sink.dest_node_id, - destinations, - send_query_statistics_with_every_batch)); + destinations)); } Status VResultFileSink::init(const TDataSink& tsink) { @@ -127,7 +124,7 @@ Status VResultFileSink::close(RuntimeState* state, Status exec_status) { if (_is_top_sink) { // close sender, this is normal path end if (_sender) { - _sender->update_num_written_rows(_writer == nullptr ? 0 : _writer->get_written_rows()); + _sender->update_return_rows(_writer == nullptr ? 
0 : _writer->get_written_rows()); static_cast(_sender->close(final_status)); } static_cast(state->exec_env()->result_mgr()->cancel_at_time( @@ -148,12 +145,4 @@ Status VResultFileSink::close(RuntimeState* state, Status exec_status) { return Status::OK(); } -void VResultFileSink::set_query_statistics(std::shared_ptr statistics) { - if (_is_top_sink) { - _sender->set_query_statistics(statistics); - } else { - _stream_sender->set_query_statistics(statistics); - } -} - } // namespace doris::vectorized diff --git a/be/src/vec/sink/vresult_file_sink.h b/be/src/vec/sink/vresult_file_sink.h index b0d05823a503930..65bc0492d89a32c 100644 --- a/be/src/vec/sink/vresult_file_sink.h +++ b/be/src/vec/sink/vresult_file_sink.h @@ -34,7 +34,6 @@ namespace doris { class BufferControlBlock; class ObjectPool; -class QueryStatistics; class RuntimeProfile; class RuntimeState; class TDataSink; @@ -54,7 +53,6 @@ class VResultFileSink : public AsyncWriterSink& destinations, - bool send_query_statistics_with_every_batch, const std::vector& t_output_expr, DescriptorTbl& descs); Status init(const TDataSink& thrift_sink) override; @@ -65,8 +63,6 @@ class VResultFileSink : public AsyncWriterSink statistics) override; - private: // set file options when sink type is FILE std::unique_ptr _file_opts; diff --git a/be/src/vec/sink/vresult_sink.cpp b/be/src/vec/sink/vresult_sink.cpp index 3fa2e035976eeac..59bf82483c5fffd 100644 --- a/be/src/vec/sink/vresult_sink.cpp +++ b/be/src/vec/sink/vresult_sink.cpp @@ -41,7 +41,6 @@ #include "vec/sink/writer/vfile_result_writer.h" namespace doris { -class QueryStatistics; class RowDescriptor; class TExpr; @@ -169,9 +168,8 @@ Status VResultSink::close(RuntimeState* state, Status exec_status) { // close sender, this is normal path end if (_sender) { if (_writer) { - _sender->update_num_written_rows(_writer->get_written_rows()); + _sender->update_return_rows(_writer->get_written_rows()); } - _sender->update_max_peak_memory_bytes(); static_cast(_sender->close(final_status)); } static_cast(state->exec_env()->result_mgr()->cancel_at_time( @@ -180,9 +178,5 @@ Status VResultSink::close(RuntimeState* state, Status exec_status) { return DataSink::close(state, exec_status); } -void VResultSink::set_query_statistics(std::shared_ptr statistics) { - _sender->set_query_statistics(statistics); -} - } // namespace vectorized } // namespace doris diff --git a/be/src/vec/sink/vresult_sink.h b/be/src/vec/sink/vresult_sink.h index 0cde7399c451cd8..0dd69ee84cd4b27 100644 --- a/be/src/vec/sink/vresult_sink.h +++ b/be/src/vec/sink/vresult_sink.h @@ -34,7 +34,6 @@ namespace doris { class RuntimeState; class RuntimeProfile; class BufferControlBlock; -class QueryStatistics; class ResultWriter; class RowDescriptor; class TExpr; @@ -138,8 +137,6 @@ class VResultSink : public DataSink { // hosts. Further send() calls are illegal after calling close(). 
Status close(RuntimeState* state, Status exec_status) override; - void set_query_statistics(std::shared_ptr statistics) override; - private: Status prepare_exprs(RuntimeState* state); Status second_phase_fetch_data(RuntimeState* state, Block* final_block); diff --git a/be/src/vec/sink/writer/vtablet_writer_v2.cpp b/be/src/vec/sink/writer/vtablet_writer_v2.cpp index 58b26ae8f0a9f45..fcaf9a043dc5fbc 100644 --- a/be/src/vec/sink/writer/vtablet_writer_v2.cpp +++ b/be/src/vec/sink/writer/vtablet_writer_v2.cpp @@ -275,18 +275,19 @@ Status VTabletWriterV2::_open_streams_to_backend(int64_t dst_id, LoadStreams& st if (node_info == nullptr) { return Status::InternalError("Unknown node {} in tablet location", dst_id); } + auto idle_timeout_ms = _state->execution_timeout() * 1000; // get tablet schema from each backend only in the 1st stream for (auto& stream : streams.streams() | std::ranges::views::take(1)) { const std::vector& tablets_for_schema = _indexes_from_node[node_info->id]; RETURN_IF_ERROR(stream->open(stream, _state->exec_env()->brpc_internal_client_cache(), *node_info, _txn_id, *_schema, tablets_for_schema, - _total_streams, _state->enable_profile())); + _total_streams, idle_timeout_ms, _state->enable_profile())); } // for the rest streams, open without getting tablet schema for (auto& stream : streams.streams() | std::ranges::views::drop(1)) { RETURN_IF_ERROR(stream->open(stream, _state->exec_env()->brpc_internal_client_cache(), *node_info, _txn_id, *_schema, {}, _total_streams, - _state->enable_profile())); + idle_timeout_ms, _state->enable_profile())); } return Status::OK(); } diff --git a/be/src/vec/sink/writer/vwal_writer.cpp b/be/src/vec/sink/writer/vwal_writer.cpp index d429b8bbd9f130b..569f9bcd6527b35 100644 --- a/be/src/vec/sink/writer/vwal_writer.cpp +++ b/be/src/vec/sink/writer/vwal_writer.cpp @@ -21,6 +21,8 @@ #include +#include "util/debug_points.h" + namespace doris { namespace vectorized { @@ -63,6 +65,8 @@ Status VWalWriter::init() { } Status VWalWriter::write_wal(vectorized::Block* block) { + DBUG_EXECUTE_IF("VWalWriter.write_wal.fail", + { return Status::InternalError("Failed to write wal!"); }); PBlock pblock; size_t uncompressed_bytes = 0, compressed_bytes = 0; RETURN_IF_ERROR(block->serialize(_be_exe_version, &pblock, &uncompressed_bytes, diff --git a/be/test/vec/runtime/vdata_stream_test.cpp b/be/test/vec/runtime/vdata_stream_test.cpp index 2ac8f8a648204b8..86e6803fd6b3a12 100644 --- a/be/test/vec/runtime/vdata_stream_test.cpp +++ b/be/test/vec/runtime/vdata_stream_test.cpp @@ -171,9 +171,8 @@ TEST_F(VDataStreamTest, BasicTest) { int num_senders = 1; RuntimeProfile profile("profile"); bool is_merge = false; - std::shared_ptr statistics = std::make_shared(); auto recv = _instance.create_recvr(&runtime_stat, row_desc, uid, nid, num_senders, &profile, - is_merge, statistics); + is_merge); // Test Sender int sender_id = 1; @@ -194,10 +193,8 @@ TEST_F(VDataStreamTest, BasicTest) { dest.__set_server(addr); dests.push_back(dest); } - bool send_query_statistics_with_every_batch = false; VDataStreamSender sender(&runtime_stat, &_object_pool, sender_id, row_desc, tsink.stream_sink, - dests, send_query_statistics_with_every_batch); - sender.set_query_statistics(std::make_shared()); + dests); static_cast(sender.init(tsink)); static_cast(sender.prepare(&runtime_stat)); static_cast(sender.open(&runtime_stat)); diff --git a/docs/en/docs/sql-manual/sql-reference/Show-Statements/SHOW-PROCESSLIST.md b/docs/en/docs/sql-manual/sql-reference/Show-Statements/SHOW-PROCESSLIST.md index 
cde7757d8602370..a2910e6cbc99ef9 100644 --- a/docs/en/docs/sql-manual/sql-reference/Show-Statements/SHOW-PROCESSLIST.md +++ b/docs/en/docs/sql-manual/sql-reference/Show-Statements/SHOW-PROCESSLIST.md @@ -43,7 +43,7 @@ SHOW [FULL] PROCESSLIST illustrate: - CurrentConnected: Indicates whether the connection is currently connected -- Id: It is the unique identifier of this thread. When we find that there is a problem with this thread, we can use the kill command to add this Id value to kill this thread. Earlier we said that the information displayed by show processlist comes from the information_schema.processlist table, so this Id is the primary key of this table. +- Id: It is the unique identifier of this thread. When we find that there is a problem with this thread, we can use the kill command to add this Id value to kill this thread. - User: refers to the user who started this thread. - Host: Records the IP and port number of the client sending the request. Through this information, when troubleshooting the problem, we can locate which client and which process sent the request. - LoginTime: Timestamp when the connection is make. diff --git a/docs/zh-CN/docs/ecosystem/flink-doris-connector.md b/docs/zh-CN/docs/ecosystem/flink-doris-connector.md index c269d0931c13ade..6d9b2a8d976c6de 100644 --- a/docs/zh-CN/docs/ecosystem/flink-doris-connector.md +++ b/docs/zh-CN/docs/ecosystem/flink-doris-connector.md @@ -395,6 +395,7 @@ ON a.city = c.city ## Flink 写入指标 其中Counter类型的指标值为导入任务从开始到当前的累加值,可以在Flink Webui metrics中观察各表的各项指标。 + | Name | Metric Type | Description | | ------------------------- | ----------- | ------------------------------------------ | | totalFlushLoadBytes | Counter | 已经刷新导入的总字节数 | diff --git a/docs/zh-CN/docs/sql-manual/sql-reference/Show-Statements/SHOW-PROCESSLIST.md b/docs/zh-CN/docs/sql-manual/sql-reference/Show-Statements/SHOW-PROCESSLIST.md index 002da45ee54cb8d..255c8d724c04be1 100644 --- a/docs/zh-CN/docs/sql-manual/sql-reference/Show-Statements/SHOW-PROCESSLIST.md +++ b/docs/zh-CN/docs/sql-manual/sql-reference/Show-Statements/SHOW-PROCESSLIST.md @@ -43,7 +43,7 @@ SHOW [FULL] PROCESSLIST 说明: - CurrentConnected: 是否为当前连接。 -- Id: 就是这个线程的唯一标识,当我们发现这个线程有问题的时候,可以通过 kill 命令,加上这个Id值将这个线程杀掉。前面我们说了show processlist 显示的信息时来自information_schema.processlist 表,所以这个Id就是这个表的主键。 +- Id: 就是这个线程的唯一标识,当我们发现这个线程有问题的时候,可以通过 kill 命令,加上这个Id值将这个线程杀掉。 - User: 就是指启动这个线程的用户。 - Host: 记录了发送请求的客户端的 IP 和 端口号。通过这些信息在排查问题的时候,我们可以定位到是哪个客户端的哪个进程发送的请求。 - LoginTime: 建立连接的时间。 diff --git a/fe/fe-common/src/main/java/org/apache/doris/catalog/StructField.java b/fe/fe-common/src/main/java/org/apache/doris/catalog/StructField.java index 0dde56585763b5f..1f30b35dadfb954 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/catalog/StructField.java +++ b/fe/fe-common/src/main/java/org/apache/doris/catalog/StructField.java @@ -40,7 +40,7 @@ public class StructField { @SerializedName(value = "containsNull") private final boolean containsNull; // Now always true (nullable field) - private static final String DEFAULT_FIELD_NAME = "col"; + public static final String DEFAULT_FIELD_NAME = "col"; public StructField(String name, Type type, String comment, boolean containsNull) { this.name = name.toLowerCase(); diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java index 248b0f0dbe7f06e..17e859c2c0e870e 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java +++ 
b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java @@ -2337,6 +2337,16 @@ public class Config extends ConfigBase { @ConfField(mutable = true, masterOnly = true) public static int workload_max_action_num_in_policy = 5; // mainly used to limit set session var action + @ConfField(mutable = true) + public static int workload_runtime_status_thread_interval_ms = 2000; + + // NOTE: it should bigger than be config report_query_statistics_interval_ms + @ConfField(mutable = true) + public static int query_audit_log_timeout_ms = 5000; + + @ConfField(mutable = true) + public static int be_report_query_statistics_timeout_ms = 60000; + @ConfField(mutable = true, masterOnly = true) public static int workload_group_max_num = 15; @@ -2420,4 +2430,19 @@ public class Config extends ConfigBase { "The max number work threads of http upload submitter." }) public static int http_load_submitter_max_worker_threads = 2; + + //========================================================================== + // begin of cloud config + //========================================================================== + + @ConfField + public static String cloud_unique_id = ""; + + public static boolean isCloudMode() { + return !cloud_unique_id.isEmpty(); + } + + //========================================================================== + // end of cloud config + //========================================================================== } diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/StructLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/StructLiteral.java index ffec5ee587fc4c0..ac67e0c16a75d91 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/StructLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/StructLiteral.java @@ -46,11 +46,26 @@ public StructLiteral() { public StructLiteral(LiteralExpr... exprs) throws AnalysisException { type = new StructType(); children = new ArrayList<>(); + for (int i = 0; i < exprs.length; i++) { + if (!StructType.STRUCT.supportSubType(exprs[i].getType())) { + throw new AnalysisException("Invalid element type in STRUCT: " + exprs[i].getType()); + } + ((StructType) type).addField( + new StructField(StructField.DEFAULT_FIELD_NAME + (i + 1), exprs[i].getType())); + children.add(exprs[i]); + } + } + + /** + * for nereids + */ + public StructLiteral(Type type, LiteralExpr... 
exprs) throws AnalysisException { + this.type = type; + this.children = new ArrayList<>(); for (LiteralExpr expr : exprs) { if (!StructType.STRUCT.supportSubType(expr.getType())) { throw new AnalysisException("Invalid element type in STRUCT: " + expr.getType()); } - ((StructType) type).addField(new StructField(expr.getType())); children.add(expr); } } @@ -104,8 +119,8 @@ public String getStringValueInFe() { // same with be default field index start with 1 for (int i = 0; i < children.size(); i++) { Expr child = children.get(i); - String fieldName = new StructField(child.getType()).getName(); - list.add("\"" + fieldName + (i + 1) + "\": " + getStringLiteralForComplexType(child)); + list.add("\"" + ((StructType) type).getFields().get(i).getName() + "\": " + + getStringLiteralForComplexType(child)); } return "{" + StringUtils.join(list, ", ") + "}"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java index 3b3f5939fd4f627..2f33e12da474b18 100755 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java @@ -230,6 +230,7 @@ import org.apache.doris.qe.VariableMgr; import org.apache.doris.resource.Tag; import org.apache.doris.resource.workloadgroup.WorkloadGroupMgr; +import org.apache.doris.resource.workloadschedpolicy.WorkloadRuntimeStatusMgr; import org.apache.doris.resource.workloadschedpolicy.WorkloadSchedPolicyMgr; import org.apache.doris.resource.workloadschedpolicy.WorkloadSchedPolicyPublisher; import org.apache.doris.scheduler.manager.TransientTaskManager; @@ -491,6 +492,9 @@ public class Env { private WorkloadGroupMgr workloadGroupMgr; private WorkloadSchedPolicyMgr workloadSchedPolicyMgr; + + private WorkloadRuntimeStatusMgr workloadRuntimeStatusMgr; + private QueryStats queryStats; private StatisticsCleaner statisticsCleaner; @@ -739,6 +743,7 @@ private Env(boolean isCheckpointCatalog) { this.globalFunctionMgr = new GlobalFunctionMgr(); this.workloadGroupMgr = new WorkloadGroupMgr(); this.workloadSchedPolicyMgr = new WorkloadSchedPolicyMgr(); + this.workloadRuntimeStatusMgr = new WorkloadRuntimeStatusMgr(); this.queryStats = new QueryStats(); this.loadManagerAdapter = new LoadManagerAdapter(); this.hiveTransactionMgr = new HiveTransactionMgr(); @@ -835,6 +840,10 @@ public WorkloadSchedPolicyMgr getWorkloadSchedPolicyMgr() { return workloadSchedPolicyMgr; } + public WorkloadRuntimeStatusMgr getWorkloadRuntimeStatusMgr() { + return workloadRuntimeStatusMgr; + } + // use this to get correct ClusterInfoService instance public static SystemInfoService getCurrentSystemInfo() { return getCurrentEnv().getClusterInfo(); @@ -1014,6 +1023,7 @@ public void initialize(String[] args) throws Exception { workloadGroupMgr.startUpdateThread(); workloadSchedPolicyMgr.start(); workloadActionPublisherThread.start(); + workloadRuntimeStatusMgr.start(); } // wait until FE is ready. 
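The StructLiteral changes above make the analysis-layer literal record one StructField per child, named with the now-public StructField.DEFAULT_FIELD_NAME ("col") plus a 1-based index, and getStringValueInFe() now reads each field name back from the recorded StructType instead of regenerating it. Below is a minimal illustrative sketch of that behavior, not part of the patch itself; it assumes the usual org.apache.doris.analysis literal classes (IntLiteral, StringLiteral) and uses a throwaway main() wrapper purely for demonstration.

import org.apache.doris.analysis.IntLiteral;
import org.apache.doris.analysis.StringLiteral;
import org.apache.doris.analysis.StructLiteral;

public class StructLiteralNamingSketch {
    public static void main(String[] args) throws Exception {
        // With the varargs constructor, field names default to "col" + (index + 1),
        // so this struct records fields col1 (int) and col2 (string) in its type.
        StructLiteral literal = new StructLiteral(new IntLiteral(1), new StringLiteral("a"));

        // getStringValueInFe() looks the names up in the struct type, so the
        // expected rendering is roughly: {"col1": 1, "col2": "a"}
        System.out.println(literal.getStringValueInFe());
    }
}
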
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java index 0de4b9497a9dce4..713fcc1bdfe016a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java @@ -646,7 +646,7 @@ public PlanFragment visitPhysicalOlapScan(PhysicalOlapScan olapScan, PlanTransla BaseTableRef tableRef = new BaseTableRef(ref, olapTable, tableName); tupleDescriptor.setRef(tableRef); olapScanNode.setSelectedPartitionIds(olapScan.getSelectedPartitionIds()); - olapScanNode.setSampleTabletIds(olapScan.getSelectedTabletIds()); // TODO + olapScanNode.setSampleTabletIds(olapScan.getSelectedTabletIds()); if (olapScan.getTableSample().isPresent()) { olapScanNode.setTableSample(new TableSample(olapScan.getTableSample().get().isPercent, olapScan.getTableSample().get().sampleValue, olapScan.getTableSample().get().seek)); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java index e50acf7d01e84f5..76cb1826e551135 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java @@ -92,7 +92,7 @@ public Cast withChildren(List children) { @Override public String toSql() throws UnboundException { - return "cast(" + child().toSql() + " as " + targetType + ")"; + return "cast(" + child().toSql() + " as " + targetType.toSql() + ")"; } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CreateStruct.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CreateStruct.java index cba64fdcce9ecd3..2cc2795f5f4d015 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CreateStruct.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CreateStruct.java @@ -22,9 +22,9 @@ import org.apache.doris.nereids.trees.expressions.functions.AlwaysNotNullable; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.ExpressionTrait; +import org.apache.doris.nereids.trees.expressions.literal.StructLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DataType; -import org.apache.doris.nereids.types.StructField; import org.apache.doris.nereids.types.StructType; import com.google.common.collect.ImmutableList; @@ -66,11 +66,7 @@ public List getSignatures() { if (arity() == 0) { return SIGNATURES; } else { - ImmutableList.Builder structFields = ImmutableList.builder(); - for (int i = 0; i < arity(); i++) { - structFields.add(new StructField(String.valueOf(i + 1), children.get(i).getDataType(), true, "")); - } - return ImmutableList.of(FunctionSignature.ret(new StructType(structFields.build())) + return ImmutableList.of(FunctionSignature.ret(StructLiteral.computeDataType(children)) .args(children.stream().map(ExpressionTrait::getDataType).toArray(DataType[]::new))); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java index 307e30093048215..486eeddabd71c84 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java @@ -25,6 +25,7 @@ import org.apache.doris.nereids.types.DataType; import org.apache.doris.nereids.types.NullType; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import org.springframework.util.CollectionUtils; @@ -43,8 +44,7 @@ public class ArrayLiteral extends Literal { * construct array literal */ public ArrayLiteral(List items) { - super(ArrayType.of(CollectionUtils.isEmpty(items) ? NullType.INSTANCE : items.get(0).getDataType())); - this.items = ImmutableList.copyOf(Objects.requireNonNull(items, "items should not null")); + this(items, ArrayType.of(CollectionUtils.isEmpty(items) ? NullType.INSTANCE : items.get(0).getDataType())); } /** @@ -52,6 +52,8 @@ public ArrayLiteral(List items) { */ public ArrayLiteral(List items, DataType dataType) { super(dataType); + Preconditions.checkArgument(dataType instanceof ArrayType, + "dataType should be ArrayType, but we meet %s", dataType); this.items = ImmutableList.copyOf(Objects.requireNonNull(items, "items should not null")); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java index 47b09de04d5768c..7dab827509bed41 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java @@ -18,6 +18,8 @@ package org.apache.doris.nereids.trees.expressions.literal; import org.apache.doris.analysis.LiteralExpr; +import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DataType; import org.apache.doris.nereids.types.MapType; @@ -43,9 +45,15 @@ public MapLiteral() { } public MapLiteral(List keys, List values) { - super(computeDataType(keys, values)); + this(keys, values, computeDataType(keys, values)); + } + + private MapLiteral(List keys, List values, DataType dataType) { + super(dataType); this.keys = ImmutableList.copyOf(Objects.requireNonNull(keys, "keys should not be null")); this.values = ImmutableList.copyOf(Objects.requireNonNull(values, "values should not be null")); + Preconditions.checkArgument(dataType instanceof MapType, + "dataType should be MapType, but we meet %s", dataType); Preconditions.checkArgument(keys.size() == values.size(), "key size %s is not equal to value size %s", keys.size(), values.size()); } @@ -55,6 +63,28 @@ public List> getValue() { return ImmutableList.of(keys, values); } + @Override + protected Expression uncheckedCastTo(DataType targetType) throws AnalysisException { + if (this.dataType.equals(targetType)) { + return this; + } else if (targetType instanceof MapType) { + // we should pass dataType to constructor because arguments maybe empty + return new MapLiteral( + keys.stream() + .map(k -> k.uncheckedCastTo(((MapType) targetType).getKeyType())) + .map(Literal.class::cast) + .collect(ImmutableList.toImmutableList()), + values.stream() + .map(v -> v.uncheckedCastTo(((MapType) 
targetType).getValueType())) + .map(Literal.class::cast) + .collect(ImmutableList.toImmutableList()), + targetType + ); + } else { + return super.uncheckedCastTo(targetType); + } + } + @Override public LiteralExpr toLegacyLiteral() { List keyExprs = keys.stream() diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java index 0041673770723aa..4b4200c233d5352 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java @@ -19,15 +19,17 @@ import org.apache.doris.analysis.LiteralExpr; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; +import org.apache.doris.nereids.types.DataType; import org.apache.doris.nereids.types.StructField; import org.apache.doris.nereids.types.StructType; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import java.util.List; import java.util.Objects; -import java.util.stream.Collectors; /** * struct literal @@ -42,8 +44,17 @@ public StructLiteral() { } public StructLiteral(List fields) { - super(computeDataType(fields)); - this.fields = ImmutableList.copyOf(fields); + this(fields, computeDataType(fields)); + } + + private StructLiteral(List fields, DataType dataType) { + super(dataType); + this.fields = ImmutableList.copyOf(Objects.requireNonNull(fields, "fields should not be null")); + Preconditions.checkArgument(dataType instanceof StructType, + "dataType should be StructType, but we meet %s", dataType); + Preconditions.checkArgument(fields.size() == ((StructType) dataType).getFields().size(), + "fields size is not same with dataType size. 
%s vs %s", + fields.size(), ((StructType) dataType).getFields().size()); } @Override @@ -51,10 +62,30 @@ public List getValue() { return fields; } + @Override + protected Expression uncheckedCastTo(DataType targetType) throws AnalysisException { + if (this.dataType.equals(targetType)) { + return this; + } else if (targetType instanceof StructType) { + // we should pass dataType to constructor because arguments maybe empty + if (((StructType) targetType).getFields().size() != this.fields.size()) { + return super.uncheckedCastTo(targetType); + } + ImmutableList.Builder newLiterals = ImmutableList.builder(); + for (int i = 0; i < fields.size(); i++) { + newLiterals.add((Literal) fields.get(i) + .uncheckedCastTo(((StructType) targetType).getFields().get(i).getDataType())); + } + return new StructLiteral(newLiterals.build(), targetType); + } else { + return super.uncheckedCastTo(targetType); + } + } + @Override public LiteralExpr toLegacyLiteral() { try { - return new org.apache.doris.analysis.StructLiteral( + return new org.apache.doris.analysis.StructLiteral(dataType.toCatalogDataType(), fields.stream().map(Literal::toLegacyLiteral).toArray(LiteralExpr[]::new) ); } catch (Exception e) { @@ -89,7 +120,18 @@ public String toString() { @Override public String toSql() { - return "{" + fields.stream().map(Literal::toSql).collect(Collectors.joining(",")) + "}"; + StringBuilder sb = new StringBuilder(); + sb.append("STRUCT("); + for (int i = 0; i < fields.size(); i++) { + if (i != 0) { + sb.append(","); + } + sb.append("'").append(((StructType) dataType).getFields().get(i).getName()).append("'"); + sb.append(":"); + sb.append(fields.get(i).toSql()); + } + sb.append(")"); + return sb.toString(); } @Override @@ -97,10 +139,10 @@ public R accept(ExpressionVisitor visitor, C context) { return visitor.visitStructLiteral(this, context); } - private static StructType computeDataType(List fields) { + public static StructType computeDataType(List fields) { ImmutableList.Builder structFields = ImmutableList.builder(); for (int i = 0; i < fields.size(); i++) { - structFields.add(new StructField(String.valueOf(i + 1), fields.get(i).getDataType(), true, "")); + structFields.add(new StructField("col" + (i + 1), fields.get(i).getDataType(), true, "")); } return new StructType(structFields.build()); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Plan.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Plan.java index 521cba8483b8649..aa87de0fcab3c84 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Plan.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Plan.java @@ -163,7 +163,7 @@ default T getOrInitMutableState(String key, Supplier initState) { */ default String shape(String prefix) { StringBuilder builder = new StringBuilder(); - String me = shapeInfo(); + String me = this.getClass().getSimpleName(); String prefixTail = ""; if (! ConnectContext.get().getSessionVariable().getIgnoreShapePlanNodes().contains(me)) { builder.append(prefix).append(shapeInfo()).append("\n"); diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java index f812c3d0fde87b3..e5381a2018fd84c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java +++ b/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java @@ -999,6 +999,7 @@ public void computeSampleTabletIds() { // 3. Sampling partition. 
If Seek is specified, the partition will be the same for each sampling. long hitRows = 0; // The number of rows hit by the tablet + Set hitTabletIds = Sets.newHashSet(); long partitionSeek = tableSample.getSeek() != -1 ? tableSample.getSeek() : (long) (new SecureRandom().nextDouble() * selectedPartitionList.size()); for (int i = 0; i < selectedPartitionList.size(); i++) { @@ -1024,16 +1025,24 @@ public void computeSampleTabletIds() { ? tableSample.getSeek() : (long) (new SecureRandom().nextDouble() * tablets.size()); for (int j = 0; j < tablets.size(); j++) { int seekTid = (int) ((j + tabletSeek) % tablets.size()); + Tablet tablet = tablets.get(seekTid); + if (sampleTabletIds.size() != 0 && !sampleTabletIds.contains(tablet.getId())) { + // After PruneOlapScanTablet, sampleTabletIds.size() != 0, + // continue sampling only in sampleTabletIds. + // If it is percentage sample, the number of sampled rows is a percentage of the + // total number of rows, and It is not related to sampleTabletI after PruneOlapScanTablet. + continue; + } long tabletRowCount; if (!FeConstants.runningUnitTest) { - tabletRowCount = tablets.get(seekTid).getRowCount(true); + tabletRowCount = tablet.getRowCount(true); } else { tabletRowCount = selectedTable.getRowCount() / tablets.size(); } if (tabletRowCount == 0) { continue; } - sampleTabletIds.add(tablets.get(seekTid).getId()); + hitTabletIds.add(tablet.getId()); sampleRows -= tabletRowCount; hitRows += tabletRowCount; if (sampleRows <= 0) { @@ -1044,7 +1053,15 @@ public void computeSampleTabletIds() { break; } } - LOG.debug("after computeSampleTabletIds, hitRows {}, selectedRows {}", hitRows, selectedRows); + if (sampleTabletIds.size() != 0) { + sampleTabletIds.retainAll(hitTabletIds); + LOG.debug("after computeSampleTabletIds, hitRows {}, totalRows {}, selectedTablets {}, sampleRows {}", + hitRows, selectedRows, sampleTabletIds.size(), totalSampleRows); + } else { + sampleTabletIds = hitTabletIds; + LOG.debug("after computeSampleTabletIds, hitRows {}, selectedRows {}, sampleRows {}", hitRows, selectedRows, + totalSampleRows); + } } public boolean isFromPrepareStmt() { diff --git a/fe/fe-core/src/main/java/org/apache/doris/plugin/AuditEvent.java b/fe/fe-core/src/main/java/org/apache/doris/plugin/AuditEvent.java index 01a03e8c267c98b..732d33c5e18e179 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/plugin/AuditEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/plugin/AuditEvent.java @@ -100,6 +100,8 @@ public enum EventType { @AuditField(value = "FuzzyVariables") public String fuzzyVariables = ""; + public long pushToAuditLogQueueTime; + public static class AuditEventBuilder { private AuditEvent auditEvent = new AuditEvent(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java b/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java index 4181f66786c8cfe..83cd1d401f8b675 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java @@ -119,6 +119,6 @@ public static void logAuditLog(ConnectContext ctx, String origStmt, StatementBas } } } - Env.getCurrentAuditEventProcessor().handleAuditEvent(ctx.getAuditEventBuilder().build()); + Env.getCurrentEnv().getWorkloadRuntimeStatusMgr().submitFinishQueryToAudit(ctx.getAuditEventBuilder().build()); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java index 8daf40054ad6e53..e4d8f8273f88f8c 
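
Illustrative sketch (not part of this patch): in the computeSampleTabletIds() change above, tablets hit by TABLESAMPLE are first collected into hitTabletIds; if PruneOlapScanTablet has already filled sampleTabletIds, the final sample is the intersection of the two sets (retainAll), otherwise the hit set is used as-is. A minimal standalone sketch of that set arithmetic (the class name is hypothetical):

import java.util.HashSet;
import java.util.Set;

public class SampleIntersectSketch {
    public static void main(String[] args) {
        // Tablets kept by PruneOlapScanTablet (empty when no pruning happened).
        Set<Long> sampleTabletIds = new HashSet<>(Set.of(1L, 2L, 3L));
        // Tablets actually hit while walking partitions for TABLESAMPLE.
        Set<Long> hitTabletIds = new HashSet<>(Set.of(2L, 3L, 4L));

        if (!sampleTabletIds.isEmpty()) {
            // Continue sampling only among the pruned tablets.
            sampleTabletIds.retainAll(hitTabletIds);
        } else {
            sampleTabletIds = hitTabletIds;
        }
        System.out.println(sampleTabletIds); // e.g. [2, 3]
    }
}
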
100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java @@ -271,7 +271,8 @@ protected void handleQuery(MysqlCommand mysqlCommand, String originStmt) { break; } } - auditAfterExec(auditStmt, executor.getParsedStmt(), executor.getQueryStatisticsForAuditLog(), true); + auditAfterExec(auditStmt, executor.getParsedStmt(), executor.getQueryStatisticsForAuditLog(), + true); // execute failed, skip remaining stmts if (ctx.getState().getStateType() == MysqlStateType.ERR) { break; diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/QeProcessorImpl.java b/fe/fe-core/src/main/java/org/apache/doris/qe/QeProcessorImpl.java index a3ee3c09e4ed12f..b6d902b76cc61d1 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/QeProcessorImpl.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/QeProcessorImpl.java @@ -201,6 +201,15 @@ public TReportExecStatusResult reportExecStatus(TReportExecStatusParams params, LOG.debug("params: {}", params); } final TReportExecStatusResult result = new TReportExecStatusResult(); + + if (params.isSetReportWorkloadRuntimeStatus()) { + Env.getCurrentEnv().getWorkloadRuntimeStatusMgr().updateBeQueryStats(params.report_workload_runtime_status); + if (!params.isSetQueryId()) { + result.setStatus(new TStatus(TStatusCode.OK)); + return result; + } + } + final QueryInfo info = coordinatorMap.get(params.query_id); if (info == null) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java index 5a3c4d652272857..76c633911c7278a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java @@ -513,7 +513,7 @@ public class SessionVariable implements Serializable, Writable { public boolean isSingleSetVar = false; @VariableMgr.VarAttr(name = EXPAND_RUNTIME_FILTER_BY_INNER_JION) - public boolean expandRuntimeFilterByInnerJoin = false; + public boolean expandRuntimeFilterByInnerJoin = true; @VariableMgr.VarAttr(name = JDBC_CLICKHOUSE_QUERY_FINAL) public boolean jdbcClickhouseQueryFinal = false; diff --git a/fe/fe-core/src/main/java/org/apache/doris/resource/workloadschedpolicy/WorkloadRuntimeStatusMgr.java b/fe/fe-core/src/main/java/org/apache/doris/resource/workloadschedpolicy/WorkloadRuntimeStatusMgr.java new file mode 100644 index 000000000000000..085d844e616c944 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/resource/workloadschedpolicy/WorkloadRuntimeStatusMgr.java @@ -0,0 +1,223 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
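
Illustrative sketch (not part of this patch): the WorkloadRuntimeStatusMgr defined below keeps finished queries in a timestamped queue (submitFinishQueryToAudit), and a daemon thread later drains only the entries older than Config.query_audit_log_timeout_ms, after filling in the query statistics reported by BEs; the delay is what gives late BE reports a chance to land in the audit log. A simplified, standalone sketch of that drain logic (class and method names are stand-ins, not Doris APIs):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

public class DelayedAuditQueueSketch {
    static final class Event {
        final String queryId;
        final long enqueueTimeMs;
        Event(String queryId, long enqueueTimeMs) {
            this.queryId = queryId;
            this.enqueueTimeMs = enqueueTimeMs;
        }
    }

    private final List<Event> queue = new LinkedList<>();

    // Called when a query finishes: remember when it was queued.
    synchronized void submit(String queryId) {
        queue.add(new Event(queryId, System.currentTimeMillis()));
    }

    // Called periodically by a daemon: entries are in enqueue order, so we can
    // stop at the first one that is still younger than the timeout (the same
    // early break as getQueryNeedAudit in the patch).
    synchronized List<Event> drainOlderThan(long timeoutMs) {
        List<Event> ready = new ArrayList<>();
        long now = System.currentTimeMillis();
        Iterator<Event> it = queue.iterator();
        while (it.hasNext()) {
            Event e = it.next();
            if (now - e.enqueueTimeMs > timeoutMs) {
                ready.add(e);
                it.remove();
            } else {
                break;
            }
        }
        return ready;
    }
}
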
+ +package org.apache.doris.resource.workloadschedpolicy; + +import org.apache.doris.catalog.Env; +import org.apache.doris.common.Config; +import org.apache.doris.common.util.Daemon; +import org.apache.doris.plugin.AuditEvent; +import org.apache.doris.thrift.TQueryStatistics; +import org.apache.doris.thrift.TReportWorkloadRuntimeStatusParams; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +public class WorkloadRuntimeStatusMgr { + + private static final Logger LOG = LogManager.getLogger(WorkloadRuntimeStatusMgr.class); + private Map> beToQueryStatsMap = Maps.newConcurrentMap(); + private Map beLastReportTime = Maps.newConcurrentMap(); + private Map queryLastReportTime = Maps.newConcurrentMap(); + private final ReentrantReadWriteLock queryAuditEventLock = new ReentrantReadWriteLock(); + private List queryAuditEventList = Lists.newLinkedList(); + + class WorkloadRuntimeStatsThread extends Daemon { + + WorkloadRuntimeStatusMgr workloadStatsMgr; + + public WorkloadRuntimeStatsThread(WorkloadRuntimeStatusMgr workloadRuntimeStatusMgr, String threadName, + int interval) { + super(threadName, interval); + this.workloadStatsMgr = workloadRuntimeStatusMgr; + } + + @Override + protected void runOneCycle() { + // 1 merge be query statistics + Map queryStatisticsMap = workloadStatsMgr.getQueryStatisticsMap(); + + // 2 log query audit + List auditEventList = workloadStatsMgr.getQueryNeedAudit(); + for (AuditEvent auditEvent : auditEventList) { + TQueryStatistics queryStats = queryStatisticsMap.get(auditEvent.queryId); + if (queryStats != null) { + auditEvent.scanRows = queryStats.scan_rows; + auditEvent.scanBytes = queryStats.scan_bytes; + auditEvent.peakMemoryBytes = queryStats.max_peak_memory_bytes; + } + Env.getCurrentAuditEventProcessor().handleAuditEvent(auditEvent); + } + + // 3 clear beToQueryStatsMap when be report timeout + workloadStatsMgr.clearReportTimeoutBeStatistics(); + } + + } + + private Daemon thread = null; + + public void submitFinishQueryToAudit(AuditEvent event) { + queryAuditEventLogWriteLock(); + try { + event.pushToAuditLogQueueTime = System.currentTimeMillis(); + queryAuditEventList.add(event); + } finally { + queryAuditEventLogWriteUnlock(); + } + } + + public List getQueryNeedAudit() { + List ret = new ArrayList<>(); + long currentTime = System.currentTimeMillis(); + queryAuditEventLogWriteLock(); + try { + int queryAuditLogTimeout = Config.query_audit_log_timeout_ms; + Iterator iter = queryAuditEventList.iterator(); + while (iter.hasNext()) { + AuditEvent ae = iter.next(); + if (currentTime - ae.pushToAuditLogQueueTime > queryAuditLogTimeout) { + ret.add(ae); + iter.remove(); + } else { + break; + } + } + } finally { + queryAuditEventLogWriteUnlock(); + } + return ret; + } + + public void start() { + thread = new WorkloadRuntimeStatsThread(this, "workload-runtime-stats-thread", + Config.workload_runtime_status_thread_interval_ms); + thread.start(); + } + + public void updateBeQueryStats(TReportWorkloadRuntimeStatusParams params) { + if (!params.isSetBackendId()) { + LOG.warn("be report workload runtime status but without beid"); + return; + } + if (!params.isSetQueryStatisticsMap()) { + LOG.warn("be report workload runtime status but without 
query stats map"); + return; + } + long beId = params.backend_id; + Map queryIdMap = beToQueryStatsMap.get(beId); + beLastReportTime.put(beId, System.currentTimeMillis()); + if (queryIdMap == null) { + queryIdMap = Maps.newConcurrentMap(); + queryIdMap.putAll(params.query_statistics_map); + beToQueryStatsMap.put(beId, queryIdMap); + } else { + long currentTime = System.currentTimeMillis(); + for (Map.Entry entry : params.query_statistics_map.entrySet()) { + queryIdMap.put(entry.getKey(), entry.getValue()); + queryLastReportTime.put(entry.getKey(), currentTime); + } + } + } + + public Map getQueryStatisticsMap() { + // 1 merge query stats in all be + Set beIdSet = beToQueryStatsMap.keySet(); + Map retQueryMap = Maps.newHashMap(); + for (Long beId : beIdSet) { + Map currentQueryMap = beToQueryStatsMap.get(beId); + Set queryIdSet = currentQueryMap.keySet(); + for (String queryId : queryIdSet) { + TQueryStatistics retQuery = retQueryMap.get(queryId); + if (retQuery == null) { + retQuery = new TQueryStatistics(); + retQueryMap.put(queryId, retQuery); + } + + TQueryStatistics curQueryStats = currentQueryMap.get(queryId); + mergeQueryStatistics(retQuery, curQueryStats); + } + } + + return retQueryMap; + } + + private void mergeQueryStatistics(TQueryStatistics dst, TQueryStatistics src) { + dst.scan_rows += src.scan_rows; + dst.scan_bytes += src.scan_bytes; + dst.cpu_ms += src.cpu_ms; + if (dst.max_peak_memory_bytes < src.max_peak_memory_bytes) { + dst.max_peak_memory_bytes = src.max_peak_memory_bytes; + } + } + + void clearReportTimeoutBeStatistics() { + // 1 clear report timeout be + Set beNeedToRemove = new HashSet<>(); + Set currentBeIdSet = beToQueryStatsMap.keySet(); + Long currentTime = System.currentTimeMillis(); + for (Long beId : currentBeIdSet) { + Long lastReportTime = beLastReportTime.get(beId); + if (lastReportTime != null + && currentTime - lastReportTime > Config.be_report_query_statistics_timeout_ms) { + beNeedToRemove.add(beId); + } + } + for (Long beId : beNeedToRemove) { + beToQueryStatsMap.remove(beId); + beLastReportTime.remove(beId); + } + + // 2 clear report timeout query + Set queryNeedToClear = new HashSet<>(); + Long newCurrentTime = System.currentTimeMillis(); + Set queryLastReportTimeKeySet = queryLastReportTime.keySet(); + for (String queryId : queryLastReportTimeKeySet) { + Long lastReportTime = queryLastReportTime.get(queryId); + if (lastReportTime != null + && newCurrentTime - lastReportTime > Config.be_report_query_statistics_timeout_ms) { + queryNeedToClear.add(queryId); + } + } + + Set beIdSet = beToQueryStatsMap.keySet(); + for (String queryId : queryNeedToClear) { + for (Long beId : beIdSet) { + beToQueryStatsMap.get(beId).remove(queryId); + } + queryLastReportTime.remove(queryId); + } + } + + private void queryAuditEventLogWriteLock() { + queryAuditEventLock.writeLock().lock(); + } + + private void queryAuditEventLogWriteUnlock() { + queryAuditEventLock.writeLock().unlock(); + } +} diff --git a/fe/fe-core/src/test/java/org/apache/doris/analysis/MapLiteralTest.java b/fe/fe-core/src/test/java/org/apache/doris/analysis/MapLiteralTest.java index 0ab4fbcbcaa9976..700c54253e49721 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/analysis/MapLiteralTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/analysis/MapLiteralTest.java @@ -107,7 +107,7 @@ public void testGetStringValueForArray() throws AnalysisException { new MapLiteral(structLiteral, stringLiteral); } catch (Exception e) { Assert.assertEquals("errCode = 2, detailMessage = Invalid key type in 
Map, " - + "not support STRUCT", e.getMessage()); + + "not support STRUCT", e.getMessage()); } } @@ -163,7 +163,7 @@ public void testGetStringInFe() throws AnalysisException { } catch (Exception e) { Assert.assertEquals("errCode = 2, " + "detailMessage = Invalid key type in Map, " - + "not support STRUCT", e.getMessage()); + + "not support STRUCT", e.getMessage()); } } diff --git a/fe/fe-core/src/test/java/org/apache/doris/analysis/SelectStmtTest.java b/fe/fe-core/src/test/java/org/apache/doris/analysis/SelectStmtTest.java index e2ecffaae56de6a..331ee4eb060fb03 100755 --- a/fe/fe-core/src/test/java/org/apache/doris/analysis/SelectStmtTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/analysis/SelectStmtTest.java @@ -961,6 +961,11 @@ public void testSelectSampleHashBucketTable() throws Exception { OriginalPlanner planner16 = (OriginalPlanner) dorisAssert.query(sql16).internalExecuteOneAndGetPlan(); Set sampleTabletIds16 = ((OlapScanNode) planner16.getScanNodes().get(0)).getSampleTabletIds(); Assert.assertEquals(1, sampleTabletIds16.size()); + + String sql17 = "SELECT * FROM db1.table1 TABLESAMPLE(15 PERCENT) where siteid != 0"; + OriginalPlanner planner17 = (OriginalPlanner) dorisAssert.query(sql17).internalExecuteOneAndGetPlan(); + Set sampleTabletIds17 = ((OlapScanNode) planner17.getScanNodes().get(0)).getSampleTabletIds(); + Assert.assertEquals(2, sampleTabletIds17.size()); FeConstants.runningUnitTest = false; } diff --git a/gensrc/proto/internal_service.proto b/gensrc/proto/internal_service.proto index e91e23fe2ecb621..16f065a45fb927b 100644 --- a/gensrc/proto/internal_service.proto +++ b/gensrc/proto/internal_service.proto @@ -754,6 +754,7 @@ message POpenLoadStreamRequest { repeated PTabletID tablets = 5; optional bool enable_profile = 6 [default = false]; optional int64 total_streams = 7; + optional int64 idle_timeout_ms = 8; } message PTabletSchemaWithIndex { diff --git a/gensrc/thrift/FrontendService.thrift b/gensrc/thrift/FrontendService.thrift index 24450e4d9085121..02d3efe50edb818 100644 --- a/gensrc/thrift/FrontendService.thrift +++ b/gensrc/thrift/FrontendService.thrift @@ -406,6 +406,11 @@ struct TQueryStatistics { 5: optional i64 max_peak_memory_bytes } +struct TReportWorkloadRuntimeStatusParams { + 1: optional i64 backend_id + 2: map query_statistics_map +} + // The results of an INSERT query, sent to the coordinator as part of // TReportExecStatusParams struct TReportExecStatusParams { @@ -470,6 +475,8 @@ struct TReportExecStatusParams { 23: optional list detailed_report 24: optional TQueryStatistics query_statistics + + 25: TReportWorkloadRuntimeStatusParams report_workload_runtime_status } struct TFeResult { diff --git a/regression-test/data/fault_injection_p0/test_wal_mem_back_pressure_fault_injection.csv.gz b/regression-test/data/fault_injection_p0/test_low_wal_disk_space_fault_injection.csv.gz similarity index 100% rename from regression-test/data/fault_injection_p0/test_wal_mem_back_pressure_fault_injection.csv.gz rename to regression-test/data/fault_injection_p0/test_low_wal_disk_space_fault_injection.csv.gz diff --git a/regression-test/data/nereids_p0/eliminate_outer_join/eliminate_outer_join.out b/regression-test/data/nereids_p0/eliminate_outer_join/eliminate_outer_join.out index 1307653cc2ca664..63c635b85e97b0a 100644 --- a/regression-test/data/nereids_p0/eliminate_outer_join/eliminate_outer_join.out +++ b/regression-test/data/nereids_p0/eliminate_outer_join/eliminate_outer_join.out @@ -1,20 +1,16 @@ -- This file is automatically generated. 
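
Illustrative sketch (not part of this patch): with the new TReportWorkloadRuntimeStatusParams above, a BE reports a query_statistics_map (query id to TQueryStatistics) through TReportExecStatusParams, and the FE merges the per-BE entries per query: scan_rows, scan_bytes and cpu_ms are summed while max_peak_memory_bytes takes the maximum, mirroring mergeQueryStatistics in WorkloadRuntimeStatusMgr. A standalone sketch of that merge with a stand-in stats class (MergeStatsSketch and QueryStats are hypothetical names):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MergeStatsSketch {
    static final class QueryStats {
        long scanRows;
        long scanBytes;
        long cpuMs;
        long maxPeakMemoryBytes;
    }

    // Counters add up across BEs, peak memory keeps the maximum.
    static void merge(QueryStats dst, QueryStats src) {
        dst.scanRows += src.scanRows;
        dst.scanBytes += src.scanBytes;
        dst.cpuMs += src.cpuMs;
        dst.maxPeakMemoryBytes = Math.max(dst.maxPeakMemoryBytes, src.maxPeakMemoryBytes);
    }

    public static void main(String[] args) {
        // One map per BE: queryId -> stats reported by that BE.
        Map<String, QueryStats> be1 = new HashMap<>();
        Map<String, QueryStats> be2 = new HashMap<>();
        QueryStats a = new QueryStats();
        a.scanRows = 100;
        a.maxPeakMemoryBytes = 50;
        QueryStats b = new QueryStats();
        b.scanRows = 200;
        b.maxPeakMemoryBytes = 30;
        be1.put("q1", a);
        be2.put("q1", b);

        Map<String, QueryStats> merged = new HashMap<>();
        for (Map<String, QueryStats> perBe : List.of(be1, be2)) {
            perBe.forEach((queryId, stats) ->
                    merge(merged.computeIfAbsent(queryId, k -> new QueryStats()), stats));
        }
        System.out.println(merged.get("q1").scanRows);           // 300
        System.out.println(merged.get("q1").maxPeakMemoryBytes); // 50
    }
}
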
You should know what you did if you want to edit this -- !1 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 ps_suppkey->[s_suppkey] -------hashJoin[INNER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[n_nationkey] ---------hashJoin[INNER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() build RFs:RF0 n_regionkey->[r_regionkey] -----------PhysicalOlapScan[region] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecReplicated] -------------filter(( not n_nationkey IS NULL) and (nation.n_nationkey > 1)) ---------------PhysicalOlapScan[nation] apply RFs: RF1 ---------PhysicalDistribute[DistributionSpecReplicated] -----------filter(( not s_suppkey IS NULL) and (supplier.s_suppkey > 1)) -------------PhysicalOlapScan[supplier] apply RFs: RF2 -------PhysicalDistribute[DistributionSpecReplicated] ---------filter(( not ps_suppkey IS NULL) and (partsupp.ps_suppkey > 1)) -----------PhysicalOlapScan[partsupp] +--hashJoin[INNER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() +------hashJoin[INNER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() +--------PhysicalOlapScan[region] +--------filter(( not n_nationkey IS NULL) and (nation.n_nationkey > 1)) +----------PhysicalOlapScan[nation] +------filter(( not s_suppkey IS NULL) and (supplier.s_suppkey > 1)) +--------PhysicalOlapScan[supplier] +----filter(( not ps_suppkey IS NULL) and (partsupp.ps_suppkey > 1)) +------PhysicalOlapScan[partsupp] Hint log: Used: [broadcast]_2 @@ -23,22 +19,15 @@ SyntaxError: -- !2 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[RIGHT_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -------PhysicalDistribute[DistributionSpecHash] ---------filter((supplier.s_suppkey > 1)) -----------hashJoin[FULL_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[FULL_OUTER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalOlapScan[region] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalOlapScan[nation] -------------PhysicalDistribute[DistributionSpecHash] ---------------PhysicalOlapScan[supplier] -------PhysicalDistribute[DistributionSpecHash] ---------filter(( not ps_suppkey IS NULL) and (partsupp.ps_suppkey > 1)) -----------PhysicalOlapScan[partsupp] +--hashJoin[RIGHT_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() +----filter((supplier.s_suppkey > 1)) +------hashJoin[FULL_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() +--------hashJoin[FULL_OUTER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() +----------PhysicalOlapScan[region] +----------PhysicalOlapScan[nation] +--------PhysicalOlapScan[supplier] +----filter(( not ps_suppkey IS NULL) and (partsupp.ps_suppkey > 1)) +------PhysicalOlapScan[partsupp] Hint log: Used: @@ -47,21 +36,15 @@ SyntaxError: -- !3 -- PhysicalResultSink 
---PhysicalDistribute[DistributionSpecGather] -----hashJoin[RIGHT_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -------PhysicalDistribute[DistributionSpecHash] ---------filter((supplier.s_suppkey > 1)) -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() -------------hashJoin[FULL_OUTER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[region] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[nation] -------------PhysicalDistribute[DistributionSpecReplicated] ---------------PhysicalOlapScan[supplier] -------PhysicalDistribute[DistributionSpecHash] ---------filter(( not ps_suppkey IS NULL) and (partsupp.ps_suppkey > 1)) -----------PhysicalOlapScan[partsupp] +--hashJoin[RIGHT_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() +----filter((supplier.s_suppkey > 1)) +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() +--------hashJoin[FULL_OUTER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() +----------PhysicalOlapScan[region] +----------PhysicalOlapScan[nation] +--------PhysicalOlapScan[supplier] +----filter(( not ps_suppkey IS NULL) and (partsupp.ps_suppkey > 1)) +------PhysicalOlapScan[partsupp] Hint log: Used: @@ -70,12 +53,10 @@ SyntaxError: -- !4 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() -------filter(( not r_name IS NULL) and (region.r_name = '')) ---------PhysicalOlapScan[region] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[nation] +--hashJoin[LEFT_OUTER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() +----filter(( not r_name IS NULL) and (region.r_name = '')) +------PhysicalOlapScan[region] +----PhysicalOlapScan[nation] Hint log: Used: [broadcast]_2 @@ -84,15 +65,12 @@ SyntaxError: -- !5 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() -------hashJoin[LEFT_OUTER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() ---------filter(( not r_name IS NULL) and (region.r_name = '')) -----------PhysicalOlapScan[region] ---------PhysicalDistribute[DistributionSpecReplicated] -----------PhysicalOlapScan[nation] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[supplier] +--hashJoin[LEFT_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() +----hashJoin[LEFT_OUTER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() +------filter(( not r_name IS NULL) and (region.r_name = '')) +--------PhysicalOlapScan[region] +------PhysicalOlapScan[nation] +----PhysicalOlapScan[supplier] Hint log: Used: [broadcast]_2 @@ -101,18 +79,14 @@ SyntaxError: -- !6 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -------hashJoin[LEFT_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() ---------hashJoin[LEFT_OUTER_JOIN] 
hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() -----------filter(( not r_name IS NULL) and (region.r_name = '')) -------------PhysicalOlapScan[region] -----------PhysicalDistribute[DistributionSpecReplicated] -------------PhysicalOlapScan[nation] ---------PhysicalDistribute[DistributionSpecReplicated] -----------PhysicalOlapScan[supplier] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[partsupp] +--hashJoin[LEFT_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() +----hashJoin[LEFT_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() +--------filter(( not r_name IS NULL) and (region.r_name = '')) +----------PhysicalOlapScan[region] +--------PhysicalOlapScan[nation] +------PhysicalOlapScan[supplier] +----PhysicalOlapScan[partsupp] Hint log: Used: [broadcast]_2 @@ -121,20 +95,15 @@ SyntaxError: -- !7 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[FULL_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -------PhysicalDistribute[DistributionSpecHash] ---------hashJoin[LEFT_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() -----------hashJoin[INNER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() build RFs:RF0 n_regionkey->[r_regionkey] -------------filter(( not r_regionkey IS NULL)) ---------------PhysicalOlapScan[region] apply RFs: RF0 -------------PhysicalDistribute[DistributionSpecReplicated] ---------------filter(( not n_regionkey IS NULL)) -----------------PhysicalOlapScan[nation] -----------PhysicalDistribute[DistributionSpecReplicated] -------------PhysicalOlapScan[supplier] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[partsupp] +--hashJoin[FULL_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() +----hashJoin[LEFT_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() +------hashJoin[INNER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() +--------filter(( not r_regionkey IS NULL)) +----------PhysicalOlapScan[region] +--------filter(( not n_regionkey IS NULL)) +----------PhysicalOlapScan[nation] +------PhysicalOlapScan[supplier] +----PhysicalOlapScan[partsupp] Hint log: Used: [broadcast]_2 @@ -143,19 +112,15 @@ SyntaxError: -- !8 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -------hashJoin[LEFT_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() ---------hashJoin[INNER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() build RFs:RF0 n_regionkey->[r_regionkey] -----------filter(( not r_name IS NULL) and ( not r_regionkey IS NULL) and (region.r_name = '')) -------------PhysicalOlapScan[region] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecReplicated] -------------filter(( not n_regionkey IS NULL)) ---------------PhysicalOlapScan[nation] ---------PhysicalDistribute[DistributionSpecReplicated] -----------PhysicalOlapScan[supplier] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[partsupp] +--hashJoin[LEFT_OUTER_JOIN] 
hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() +----hashJoin[LEFT_OUTER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() +------hashJoin[INNER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() +--------filter(( not r_name IS NULL) and ( not r_regionkey IS NULL) and (region.r_name = '')) +----------PhysicalOlapScan[region] +--------filter(( not n_regionkey IS NULL)) +----------PhysicalOlapScan[nation] +------PhysicalOlapScan[supplier] +----PhysicalOlapScan[partsupp] Hint log: Used: [broadcast]_2 @@ -164,19 +129,15 @@ SyntaxError: -- !9 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -------hashJoin[INNER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[n_nationkey] ---------hashJoin[INNER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() build RFs:RF0 n_regionkey->[r_regionkey] -----------filter(( not r_name IS NULL) and ( not r_regionkey IS NULL) and (region.r_name = '')) -------------PhysicalOlapScan[region] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecReplicated] -------------filter(( not n_regionkey IS NULL)) ---------------PhysicalOlapScan[nation] apply RFs: RF1 ---------PhysicalDistribute[DistributionSpecReplicated] -----------PhysicalOlapScan[supplier] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[partsupp] +--hashJoin[LEFT_OUTER_JOIN] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((nation.n_nationkey = supplier.s_suppkey)) otherCondition=() +------hashJoin[INNER_JOIN] hashCondition=((region.r_regionkey = nation.n_regionkey)) otherCondition=() +--------filter(( not r_name IS NULL) and ( not r_regionkey IS NULL) and (region.r_name = '')) +----------PhysicalOlapScan[region] +--------filter(( not n_regionkey IS NULL)) +----------PhysicalOlapScan[nation] +------PhysicalOlapScan[supplier] +----PhysicalOlapScan[partsupp] Hint log: Used: [broadcast]_2 diff --git a/regression-test/data/nereids_rules_p0/eager_aggregate/basic.out b/regression-test/data/nereids_rules_p0/eager_aggregate/basic.out index 49380e018c44312..a52635563414f4d 100644 --- a/regression-test/data/nereids_rules_p0/eager_aggregate/basic.out +++ b/regression-test/data/nereids_rules_p0/eager_aggregate/basic.out @@ -1,55 +1,43 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !1 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() build RFs:RF0 device_id->[device_id] -------------hashAgg[LOCAL] ---------------filter((a.event_id = 'ad_click')) -----------------PhysicalOlapScan[com_dd_library] apply RFs: RF0 -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------filter((cast(experiment_id as DOUBLE) = 37.0)) -------------------PhysicalOlapScan[shunt_log_com_dd_library] +----------filter((a.event_id = 'ad_click')) +------------PhysicalOlapScan[com_dd_library] +--------hashAgg[LOCAL] +----------filter((cast(experiment_id as DOUBLE) = 37.0)) +------------PhysicalOlapScan[shunt_log_com_dd_library] -- !2 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() +--------hashAgg[LOCAL] +----------PhysicalOlapScan[com_dd_library] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() build RFs:RF0 device_id->[device_id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[com_dd_library] apply RFs: RF0 -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------filter((cast(experiment_id as DOUBLE) = 73.0)) -------------------PhysicalOlapScan[shunt_log_com_dd_library] +----------filter((cast(experiment_id as DOUBLE) = 73.0)) +------------PhysicalOlapScan[shunt_log_com_dd_library] -- !3 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() build RFs:RF0 device_id->[device_id] -------------PhysicalOlapScan[com_dd_library] apply RFs: RF0 -------------PhysicalDistribute[DistributionSpecHash] ---------------filter((cast(experiment_id as DOUBLE) = 73.0)) -----------------PhysicalOlapScan[shunt_log_com_dd_library] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() +--------PhysicalOlapScan[com_dd_library] +--------filter((cast(experiment_id as DOUBLE) = 73.0)) +----------PhysicalOlapScan[shunt_log_com_dd_library] -- !4 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() +--------hashAgg[LOCAL] +----------PhysicalOlapScan[com_dd_library] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[com_dd_library] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[shunt_log_com_dd_library] +----------PhysicalOlapScan[shunt_log_com_dd_library] diff --git 
a/regression-test/data/nereids_rules_p0/eager_aggregate/basic_one_side.out b/regression-test/data/nereids_rules_p0/eager_aggregate/basic_one_side.out index 8b90b18b9965bbd..087b10ea4af8c46 100644 --- a/regression-test/data/nereids_rules_p0/eager_aggregate/basic_one_side.out +++ b/regression-test/data/nereids_rules_p0/eager_aggregate/basic_one_side.out @@ -1,52 +1,40 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !1 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() build RFs:RF0 device_id->[device_id] -------------hashAgg[LOCAL] ---------------filter((a.event_id = 'ad_click')) -----------------PhysicalOlapScan[com_dd_library_one_side] apply RFs: RF0 -------------PhysicalDistribute[DistributionSpecHash] ---------------filter((cast(experiment_id as DOUBLE) = 37.0)) -----------------PhysicalOlapScan[shunt_log_com_dd_library_one_side] +----------filter((a.event_id = 'ad_click')) +------------PhysicalOlapScan[com_dd_library_one_side] +--------filter((cast(experiment_id as DOUBLE) = 37.0)) +----------PhysicalOlapScan[shunt_log_com_dd_library_one_side] -- !2 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() build RFs:RF0 device_id->[device_id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[com_dd_library_one_side] apply RFs: RF0 -------------PhysicalDistribute[DistributionSpecHash] ---------------filter((cast(experiment_id as DOUBLE) = 73.0)) -----------------PhysicalOlapScan[shunt_log_com_dd_library_one_side] +----------PhysicalOlapScan[com_dd_library_one_side] +--------filter((cast(experiment_id as DOUBLE) = 73.0)) +----------PhysicalOlapScan[shunt_log_com_dd_library_one_side] -- !3 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() build RFs:RF0 device_id->[device_id] -------------PhysicalOlapScan[com_dd_library_one_side] apply RFs: RF0 -------------PhysicalDistribute[DistributionSpecHash] ---------------filter((cast(experiment_id as DOUBLE) = 73.0)) -----------------PhysicalOlapScan[shunt_log_com_dd_library_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() +--------PhysicalOlapScan[com_dd_library_one_side] +--------filter((cast(experiment_id as DOUBLE) = 73.0)) +----------PhysicalOlapScan[shunt_log_com_dd_library_one_side] -- !4 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((a.device_id = b.device_id)) 
otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[com_dd_library_one_side] -------------PhysicalDistribute[DistributionSpecHash] ---------------PhysicalOlapScan[shunt_log_com_dd_library_one_side] +----------PhysicalOlapScan[com_dd_library_one_side] +--------PhysicalOlapScan[shunt_log_com_dd_library_one_side] diff --git a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_count_through_join.out b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_count_through_join.out index 135190f5ceb9824..2cdf0df3cf50316 100644 --- a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_count_through_join.out +++ b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_count_through_join.out @@ -1,557 +1,461 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !groupby_pushdown_basic -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] -- !groupby_pushdown_left_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_right_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_full_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() 
-------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] -- !groupby_pushdown_left_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_complex_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((count_t.score > 10)) -----------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------filter((count_t.score > 10)) +------------PhysicalOlapScan[count_t] -- !groupby_pushdown_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_deep_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] 
-----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((count_t.score > 10)) -----------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------filter((count_t.score > 10)) +------------PhysicalOlapScan[count_t] -- !groupby_pushdown_having -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((count(score) > 100)) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] +--filter((count(score) > 100)) +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------hashAgg[LOCAL] +------------PhysicalOlapScan[count_t] ----------hashAgg[LOCAL] -------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[count_t] ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[count_t] +------------PhysicalOlapScan[count_t] -- !groupby_pushdown_mixed_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_multi_table_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +--hashAgg[LOCAL] +----hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +--------------PhysicalOlapScan[count_t] ------------hashAgg[LOCAL] --------------PhysicalOlapScan[count_t] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] -- !groupby_pushdown_with_order_by -- PhysicalResultSink --PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] - --- !groupby_pushdown_multiple_equal_conditions -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +----PhysicalQuickSort[LOCAL_SORT] +------hashAgg[GLOBAL] --------hashAgg[LOCAL] 
-----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] --------------PhysicalOlapScan[count_t] ------------hashAgg[LOCAL] --------------PhysicalOlapScan[count_t] --- !groupby_pushdown_equal_conditions_with_aggregate -- +-- !groupby_pushdown_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] + +-- !groupby_pushdown_equal_conditions_with_aggregate -- +PhysicalResultSink +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_equal_conditions_non_aggregate_selection -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] -- !groupby_pushdown_equal_conditions_non_aggregate_selection_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] -- !groupby_pushdown_with_where_clause -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((t1.score > 50)) -----------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------filter((t1.score > 50)) +------------PhysicalOlapScan[count_t] -- !groupby_pushdown_varied_aggregates -- PhysicalResultSink 
---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_with_order_by_limit -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] - --- !groupby_pushdown_alias_multiple_equal_conditions -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] --------------PhysicalOlapScan[count_t] ------------hashAgg[LOCAL] --------------PhysicalOlapScan[count_t] --- !groupby_pushdown_complex_join_condition -- +-- !groupby_pushdown_alias_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] + +-- !groupby_pushdown_complex_join_condition -- +PhysicalResultSink +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_function_processed_columns -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_nested_queries -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] 
hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------filter((count_t.id < 100)) -----------------PhysicalOlapScan[count_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((count_t.score > 20) and (t1.id < 100)) -----------------PhysicalOlapScan[count_t] +----------filter((count_t.id < 100)) +------------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------filter((count_t.score > 20) and (t1.id < 100)) +------------PhysicalOlapScan[count_t] -- !groupby_pushdown_basic -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] -- !groupby_pushdown_left_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_right_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_full_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] -- !groupby_pushdown_left_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] 
-----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_complex_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] -- !groupby_pushdown_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((count_t.score > 10)) -----------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------filter((count_t.score > 10)) +------------PhysicalOlapScan[count_t] -- !groupby_pushdown_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_deep_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((count_t.score > 10)) 
-----------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------filter((count_t.score > 10)) +------------PhysicalOlapScan[count_t] -- !groupby_pushdown_having -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((count(*) > 100)) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] +--filter((count(*) > 100)) +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ----------hashAgg[LOCAL] -------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[count_t] ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[count_t] +------------PhysicalOlapScan[count_t] +----------hashAgg[LOCAL] +------------PhysicalOlapScan[count_t] -- !groupby_pushdown_multi_table_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +--hashAgg[LOCAL] +----hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +--------------PhysicalOlapScan[count_t] ------------hashAgg[LOCAL] --------------PhysicalOlapScan[count_t] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] -- !groupby_pushdown_with_order_by -- PhysicalResultSink --PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] - --- !groupby_pushdown_multiple_equal_conditions -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +----PhysicalQuickSort[LOCAL_SORT] +------hashAgg[GLOBAL] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] --------------PhysicalOlapScan[count_t] ------------hashAgg[LOCAL] --------------PhysicalOlapScan[count_t] +-- !groupby_pushdown_multiple_equal_conditions -- +PhysicalResultSink +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] + -- !groupby_pushdown_equal_conditions_non_aggregate_selection -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] 
-------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t] -- !groupby_pushdown_with_where_clause -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((t1.score > 50)) -----------------PhysicalOlapScan[count_t] +----------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------filter((t1.score > 50)) +------------PhysicalOlapScan[count_t] -- !groupby_pushdown_varied_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_with_order_by_limit -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t] +--------------PhysicalOlapScan[count_t] +------------hashAgg[LOCAL] +--------------PhysicalOlapScan[count_t] -- !groupby_pushdown_complex_join_condition -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) -------------PhysicalOlapScan[count_t] -------------PhysicalOlapScan[count_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) +--------PhysicalOlapScan[count_t] +--------PhysicalOlapScan[count_t] -- !groupby_pushdown_nested_queries -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) 
otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------filter((count_t.id < 100)) -----------------PhysicalOlapScan[count_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((count_t.score > 20) and (t1.id < 100)) -----------------PhysicalOlapScan[count_t] +----------filter((count_t.id < 100)) +------------PhysicalOlapScan[count_t] +--------hashAgg[LOCAL] +----------filter((count_t.score > 20) and (t1.id < 100)) +------------PhysicalOlapScan[count_t] diff --git a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_count_through_join_one_side.out b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_count_through_join_one_side.out index f35a74ae3df8516..59c57e460e8d242 100644 --- a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_count_through_join_one_side.out +++ b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_count_through_join_one_side.out @@ -1,513 +1,417 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !groupby_pushdown_basic -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +----------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_left_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_right_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_full_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- 
!groupby_pushdown_left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +----------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_left_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_complex_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[count_t_one_side] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((count_t_one_side.score > 10)) -----------------PhysicalOlapScan[count_t_one_side] +----------filter((count_t_one_side.score > 10)) +------------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] 
+----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_deep_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[count_t_one_side] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((count_t_one_side.score > 10)) -----------------PhysicalOlapScan[count_t_one_side] +----------filter((count_t_one_side.score > 10)) +------------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_having -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((count(score) > 100)) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] +--filter((count(score) > 100)) +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ----------hashAgg[LOCAL] -------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[count_t_one_side] ---------------PhysicalOlapScan[count_t_one_side] +------------PhysicalOlapScan[count_t_one_side] +----------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_mixed_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_multi_table_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +--hashAgg[LOCAL] +----hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t_one_side] -----------------PhysicalOlapScan[count_t_one_side] ---------PhysicalDistribute[DistributionSpecHash] -----------PhysicalOlapScan[count_t_one_side] +--------------PhysicalOlapScan[count_t_one_side] +------------PhysicalOlapScan[count_t_one_side] +------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_with_order_by -- PhysicalResultSink --PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +----PhysicalQuickSort[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] 
+----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t_one_side] -----------------PhysicalOlapScan[count_t_one_side] +--------------PhysicalOlapScan[count_t_one_side] +------------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +----------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_equal_conditions_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_equal_conditions_non_aggregate_selection -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +----------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_equal_conditions_non_aggregate_selection_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t_one_side] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t_one_side] +----------PhysicalOlapScan[count_t_one_side] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_with_where_clause -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] --------hashAgg[LOCAL] 
-----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[count_t_one_side] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((t1.score > 50)) -----------------PhysicalOlapScan[count_t_one_side] +----------filter((t1.score > 50)) +------------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_varied_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_with_order_by_limit -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[count_t_one_side] -----------------PhysicalOlapScan[count_t_one_side] +--------------PhysicalOlapScan[count_t_one_side] +------------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_alias_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +----------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_complex_join_condition -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_function_processed_columns -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] 
+------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_nested_queries -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((count_t_one_side.id < 100)) +----------PhysicalOlapScan[count_t_one_side] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------filter((count_t_one_side.id < 100)) ---------------PhysicalOlapScan[count_t_one_side] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((count_t_one_side.score > 20) and (t1.id < 100)) -----------------PhysicalOlapScan[count_t_one_side] +----------filter((count_t_one_side.score > 20) and (t1.id < 100)) +------------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_basic -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_left_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_right_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_full_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] 
---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_left_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_complex_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[count_t_one_side] apply RFs: RF0 -------------filter((count_t_one_side.score > 10)) ---------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------filter((count_t_one_side.score > 10)) +----------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_deep_subquery -- PhysicalResultSink 
---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[count_t_one_side] apply RFs: RF0 -------------filter((count_t_one_side.score > 10)) ---------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------filter((count_t_one_side.score > 10)) +----------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_having -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((count(*) > 100)) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalOlapScan[count_t_one_side] ---------------PhysicalOlapScan[count_t_one_side] +--filter((count(*) > 100)) +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[count_t_one_side] +----------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_multi_table_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() -------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalOlapScan[count_t_one_side] ---------------PhysicalOlapScan[count_t_one_side] -------------PhysicalDistribute[DistributionSpecReplicated] ---------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[count_t_one_side] +----------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_with_order_by -- PhysicalResultSink --PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------PhysicalOlapScan[count_t_one_side] -----------------PhysicalOlapScan[count_t_one_side] - --- !groupby_pushdown_multiple_equal_conditions -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +----PhysicalQuickSort[LOCAL_SORT] +------hashAgg[GLOBAL] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------PhysicalOlapScan[count_t_one_side] ------------PhysicalOlapScan[count_t_one_side] +-- !groupby_pushdown_multiple_equal_conditions -- +PhysicalResultSink +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] + -- 
!groupby_pushdown_equal_conditions_non_aggregate_selection -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_with_where_clause -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[count_t_one_side] apply RFs: RF0 -------------filter((t1.score > 50)) ---------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------filter((t1.score > 50)) +----------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_varied_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[count_t_one_side] -------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] -- !groupby_pushdown_with_order_by_limit -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------PhysicalOlapScan[count_t_one_side] -----------------PhysicalOlapScan[count_t_one_side] - --- !groupby_pushdown_complex_join_condition -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------PhysicalOlapScan[count_t_one_side] ------------PhysicalOlapScan[count_t_one_side] +-- !groupby_pushdown_complex_join_condition -- +PhysicalResultSink +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) +--------PhysicalOlapScan[count_t_one_side] +--------PhysicalOlapScan[count_t_one_side] + -- !groupby_pushdown_nested_queries -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) 
otherCondition=() build RFs:RF0 id->[id] -------------filter((count_t_one_side.id < 100)) ---------------PhysicalOlapScan[count_t_one_side] apply RFs: RF0 -------------filter((count_t_one_side.score > 20) and (t1.id < 100)) ---------------PhysicalOlapScan[count_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((count_t_one_side.id < 100)) +----------PhysicalOlapScan[count_t_one_side] +--------filter((count_t_one_side.score > 20) and (t1.id < 100)) +----------PhysicalOlapScan[count_t_one_side] diff --git a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_max_through_join.out b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_max_through_join.out index 7f8839733ca251c..bd4430fcb662cac 100644 --- a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_max_through_join.out +++ b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_max_through_join.out @@ -1,290 +1,237 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !groupby_pushdown_basic -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +----------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_left_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_right_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_full_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] 
hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +----------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_left_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_complex_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[max_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((max_t.score > 10)) -----------------PhysicalOlapScan[max_t] +----------filter((max_t.score > 10)) +------------PhysicalOlapScan[max_t] -- !groupby_pushdown_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_deep_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] --------hashAgg[LOCAL] 
-----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[max_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((max_t.score > 10)) -----------------PhysicalOlapScan[max_t] +----------filter((max_t.score > 10)) +------------PhysicalOlapScan[max_t] -- !groupby_pushdown_having -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((max(score) > 100)) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] +--filter((max(score) > 100)) +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ----------hashAgg[LOCAL] -------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[max_t] ---------------PhysicalOlapScan[max_t] +------------PhysicalOlapScan[max_t] +----------PhysicalOlapScan[max_t] -- !groupby_pushdown_mixed_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_multi_table_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +--hashAgg[LOCAL] +----hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[max_t] -----------------PhysicalOlapScan[max_t] ---------PhysicalDistribute[DistributionSpecHash] -----------PhysicalOlapScan[max_t] +--------------PhysicalOlapScan[max_t] +------------PhysicalOlapScan[max_t] +------PhysicalOlapScan[max_t] -- !groupby_pushdown_with_order_by -- PhysicalResultSink --PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +----PhysicalQuickSort[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[max_t] -----------------PhysicalOlapScan[max_t] +--------------PhysicalOlapScan[max_t] +------------PhysicalOlapScan[max_t] -- !groupby_pushdown_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] 
-----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +----------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_equal_conditions_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_equal_conditions_non_aggregate_selection -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +----------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_equal_conditions_non_aggregate_selection_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[max_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[max_t] +----------PhysicalOlapScan[max_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[max_t] -- !groupby_pushdown_with_where_clause -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[max_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((t1.score > 50)) -----------------PhysicalOlapScan[max_t] +----------filter((t1.score > 50)) +------------PhysicalOlapScan[max_t] -- !groupby_pushdown_varied_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_with_order_by_limit -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] 
-----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[max_t] -----------------PhysicalOlapScan[max_t] +--------------PhysicalOlapScan[max_t] +------------PhysicalOlapScan[max_t] -- !groupby_pushdown_alias_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +----------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_complex_join_condition -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_function_processed_columns -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[max_t] -------------PhysicalOlapScan[max_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[max_t] +--------PhysicalOlapScan[max_t] -- !groupby_pushdown_nested_queries -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((max_t.id < 100)) +----------PhysicalOlapScan[max_t] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------filter((max_t.id < 100)) ---------------PhysicalOlapScan[max_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((max_t.score > 20) and (t1.id < 100)) -----------------PhysicalOlapScan[max_t] +----------filter((max_t.score > 20) and (t1.id < 100)) +------------PhysicalOlapScan[max_t] diff --git a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_min_through_join.out b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_min_through_join.out index a4cc06ff143459f..a0a2acd9449a0e6 100644 --- 
a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_min_through_join.out +++ b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_min_through_join.out @@ -1,290 +1,237 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !groupby_pushdown_basic -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +----------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_left_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_right_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_full_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +----------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_left_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] 
+--------PhysicalOlapScan[min_t] -- !groupby_pushdown_complex_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) +--------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[min_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((min_t.score > 10)) -----------------PhysicalOlapScan[min_t] +----------filter((min_t.score > 10)) +------------PhysicalOlapScan[min_t] -- !groupby_pushdown_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_deep_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[min_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((min_t.score > 10)) -----------------PhysicalOlapScan[min_t] +----------filter((min_t.score > 10)) +------------PhysicalOlapScan[min_t] -- !groupby_pushdown_having -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((min(score) > 100)) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] +--filter((min(score) > 100)) +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ----------hashAgg[LOCAL] -------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------hashAgg[LOCAL] 
-----------------PhysicalOlapScan[min_t] ---------------PhysicalOlapScan[min_t] +------------PhysicalOlapScan[min_t] +----------PhysicalOlapScan[min_t] -- !groupby_pushdown_mixed_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_multi_table_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +--hashAgg[LOCAL] +----hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[min_t] -----------------PhysicalOlapScan[min_t] ---------PhysicalDistribute[DistributionSpecHash] -----------PhysicalOlapScan[min_t] +--------------PhysicalOlapScan[min_t] +------------PhysicalOlapScan[min_t] +------PhysicalOlapScan[min_t] -- !groupby_pushdown_with_order_by -- PhysicalResultSink --PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +----PhysicalQuickSort[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[min_t] -----------------PhysicalOlapScan[min_t] +--------------PhysicalOlapScan[min_t] +------------PhysicalOlapScan[min_t] -- !groupby_pushdown_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +----------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_equal_conditions_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +--------PhysicalOlapScan[min_t] 
+--------PhysicalOlapScan[min_t] -- !groupby_pushdown_equal_conditions_non_aggregate_selection -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +----------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_equal_conditions_non_aggregate_selection_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[min_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[min_t] +----------PhysicalOlapScan[min_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[min_t] -- !groupby_pushdown_with_where_clause -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[min_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((t1.score > 50)) -----------------PhysicalOlapScan[min_t] +----------filter((t1.score > 50)) +------------PhysicalOlapScan[min_t] -- !groupby_pushdown_varied_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_with_order_by_limit -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[min_t] -----------------PhysicalOlapScan[min_t] +--------------PhysicalOlapScan[min_t] +------------PhysicalOlapScan[min_t] -- !groupby_pushdown_alias_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] 
+------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +----------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_complex_join_condition -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) +--------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_function_processed_columns -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[min_t] -------------PhysicalOlapScan[min_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[min_t] +--------PhysicalOlapScan[min_t] -- !groupby_pushdown_nested_queries -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((min_t.id < 100)) +----------PhysicalOlapScan[min_t] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------filter((min_t.id < 100)) ---------------PhysicalOlapScan[min_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((min_t.score > 20) and (t1.id < 100)) -----------------PhysicalOlapScan[min_t] +----------filter((min_t.score > 20) and (t1.id < 100)) +------------PhysicalOlapScan[min_t] diff --git a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_sum_through_join.out b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_sum_through_join.out index 1e497ebd80e66dd..da05df5419d6285 100644 --- a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_sum_through_join.out +++ b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_sum_through_join.out @@ -1,303 +1,250 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !groupby_pushdown_basic -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t] +----------PhysicalOlapScan[sum_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[sum_t] -- !groupby_pushdown_left_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_right_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_full_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t] +----------PhysicalOlapScan[sum_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[sum_t] -- !groupby_pushdown_left_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_complex_conditions -- PhysicalResultSink 
---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((sum_t.score > 10)) -----------------PhysicalOlapScan[sum_t] +----------PhysicalOlapScan[sum_t] +--------hashAgg[LOCAL] +----------filter((sum_t.score > 10)) +------------PhysicalOlapScan[sum_t] -- !groupby_pushdown_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_deep_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((sum_t.score > 10)) -----------------PhysicalOlapScan[sum_t] +----------PhysicalOlapScan[sum_t] +--------hashAgg[LOCAL] +----------filter((sum_t.score > 10)) +------------PhysicalOlapScan[sum_t] -- !groupby_pushdown_having -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((sum(score) > 100)) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] +--filter((sum(score) > 100)) +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ----------hashAgg[LOCAL] -------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() 
---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[sum_t] ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[sum_t] +------------PhysicalOlapScan[sum_t] +----------hashAgg[LOCAL] +------------PhysicalOlapScan[sum_t] -- !groupby_pushdown_mixed_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_multi_table_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +--hashAgg[LOCAL] +----hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[sum_t] -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[sum_t] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +--------------PhysicalOlapScan[sum_t] ------------hashAgg[LOCAL] --------------PhysicalOlapScan[sum_t] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[sum_t] -- !groupby_pushdown_with_order_by -- PhysicalResultSink --PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[sum_t] -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[sum_t] - --- !groupby_pushdown_multiple_equal_conditions -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +----PhysicalQuickSort[LOCAL_SORT] +------hashAgg[GLOBAL] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] --------------PhysicalOlapScan[sum_t] ------------hashAgg[LOCAL] --------------PhysicalOlapScan[sum_t] --- !groupby_pushdown_equal_conditions_with_aggregate -- +-- !groupby_pushdown_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +----------PhysicalOlapScan[sum_t] 
+--------hashAgg[LOCAL] +----------PhysicalOlapScan[sum_t] + +-- !groupby_pushdown_equal_conditions_with_aggregate -- +PhysicalResultSink +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_equal_conditions_non_aggregate_selection -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t] +----------PhysicalOlapScan[sum_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[sum_t] -- !groupby_pushdown_equal_conditions_non_aggregate_selection_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_with_where_clause -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((t1.score > 50)) -----------------PhysicalOlapScan[sum_t] +----------PhysicalOlapScan[sum_t] +--------hashAgg[LOCAL] +----------filter((t1.score > 50)) +------------PhysicalOlapScan[sum_t] -- !groupby_pushdown_varied_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_with_order_by_limit -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[sum_t] -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[sum_t] - --- !groupby_pushdown_alias_multiple_equal_conditions -- -PhysicalResultSink 
---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] --------------PhysicalOlapScan[sum_t] ------------hashAgg[LOCAL] --------------PhysicalOlapScan[sum_t] --- !groupby_pushdown_complex_join_condition -- +-- !groupby_pushdown_alias_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +----------PhysicalOlapScan[sum_t] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[sum_t] + +-- !groupby_pushdown_complex_join_condition -- +PhysicalResultSink +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_function_processed_columns -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t] -------------PhysicalOlapScan[sum_t] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t] +--------PhysicalOlapScan[sum_t] -- !groupby_pushdown_nested_queries -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------hashAgg[LOCAL] ---------------filter((sum_t.id < 100)) -----------------PhysicalOlapScan[sum_t] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((sum_t.score > 20) and (t1.id < 100)) -----------------PhysicalOlapScan[sum_t] +----------filter((sum_t.id < 100)) +------------PhysicalOlapScan[sum_t] +--------hashAgg[LOCAL] +----------filter((sum_t.score > 20) and (t1.id < 100)) +------------PhysicalOlapScan[sum_t] diff --git a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_sum_through_join_one_side.out b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_sum_through_join_one_side.out index 120d293e37bd7ec..8046cec6d95b309 100644 --- a/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_sum_through_join_one_side.out +++ b/regression-test/data/nereids_rules_p0/eager_aggregate/push_down_sum_through_join_one_side.out @@ -1,290 +1,237 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !groupby_pushdown_basic -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +----------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_left_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_right_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_full_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +----------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_left_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] 
+--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_complex_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.name < t2.name)) +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[sum_t_one_side] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((sum_t_one_side.score > 10)) -----------------PhysicalOlapScan[sum_t_one_side] +----------filter((sum_t_one_side.score > 10)) +------------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_deep_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[sum_t_one_side] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((sum_t_one_side.score > 10)) -----------------PhysicalOlapScan[sum_t_one_side] +----------filter((sum_t_one_side.score > 10)) +------------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_having -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((sum(score) > 100)) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] +--filter((sum(score) > 100)) +----hashAgg[GLOBAL] 
+------hashAgg[LOCAL] +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ----------hashAgg[LOCAL] -------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[sum_t_one_side] ---------------PhysicalOlapScan[sum_t_one_side] +------------PhysicalOlapScan[sum_t_one_side] +----------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_mixed_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_multi_table_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +--hashAgg[LOCAL] +----hashJoin[INNER_JOIN] hashCondition=((t1.name = t3.name)) otherCondition=() +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[sum_t_one_side] -----------------PhysicalOlapScan[sum_t_one_side] ---------PhysicalDistribute[DistributionSpecHash] -----------PhysicalOlapScan[sum_t_one_side] +--------------PhysicalOlapScan[sum_t_one_side] +------------PhysicalOlapScan[sum_t_one_side] +------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_with_order_by -- PhysicalResultSink --PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +----PhysicalQuickSort[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[sum_t_one_side] -----------------PhysicalOlapScan[sum_t_one_side] +--------------PhysicalOlapScan[sum_t_one_side] +------------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +----------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_equal_conditions_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] 
-------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_equal_conditions_non_aggregate_selection -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +----------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_equal_conditions_non_aggregate_selection_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t_one_side] -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t_one_side] +----------PhysicalOlapScan[sum_t_one_side] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_with_where_clause -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[sum_t_one_side] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((t1.score > 50)) -----------------PhysicalOlapScan[sum_t_one_side] +----------filter((t1.score > 50)) +------------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_varied_aggregates -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_with_order_by_limit -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] 
+----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------hashAgg[LOCAL] ---------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------hashAgg[LOCAL] -------------------PhysicalOlapScan[sum_t_one_side] -----------------PhysicalOlapScan[sum_t_one_side] +--------------PhysicalOlapScan[sum_t_one_side] +------------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_alias_multiple_equal_conditions -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1_alias.id = t2_alias.id) and (t1_alias.name = t2_alias.name)) otherCondition=() -------------hashAgg[LOCAL] ---------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +----------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_complex_join_condition -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.score = t2.score)) otherCondition=(( not (name = name))) +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_function_processed_columns -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[sum_t_one_side] -------------PhysicalOlapScan[sum_t_one_side] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[sum_t_one_side] +--------PhysicalOlapScan[sum_t_one_side] -- !groupby_pushdown_nested_queries -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((sum_t_one_side.id < 100)) +----------PhysicalOlapScan[sum_t_one_side] --------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------filter((sum_t_one_side.id < 100)) ---------------PhysicalOlapScan[sum_t_one_side] apply RFs: RF0 -------------hashAgg[LOCAL] ---------------filter((sum_t_one_side.score > 20) and (t1.id < 100)) -----------------PhysicalOlapScan[sum_t_one_side] +----------filter((sum_t_one_side.score > 20) and (t1.id < 100)) +------------PhysicalOlapScan[sum_t_one_side] diff --git a/regression-test/data/nereids_rules_p0/eliminate_join_condition/eliminate_join_condition.out b/regression-test/data/nereids_rules_p0/eliminate_join_condition/eliminate_join_condition.out index a673508edba4b51..4c765dcf37faf57 100644 --- 
a/regression-test/data/nereids_rules_p0/eliminate_join_condition/eliminate_join_condition.out +++ b/regression-test/data/nereids_rules_p0/eliminate_join_condition/eliminate_join_condition.out @@ -1,40 +1,31 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !inner_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----NestedLoopJoin[CROSS_JOIN] -------PhysicalOlapScan[t] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t] +--NestedLoopJoin[CROSS_JOIN] +----PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !left_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----NestedLoopJoin[LEFT_OUTER_JOIN] -------PhysicalOlapScan[t] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t] +--NestedLoopJoin[LEFT_OUTER_JOIN] +----PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !right_outer_join -- PhysicalResultSink --NestedLoopJoin[RIGHT_OUTER_JOIN] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalOlapScan[t] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalOlapScan[t] +----PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !full_outer_join -- PhysicalResultSink --NestedLoopJoin[FULL_OUTER_JOIN] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalOlapScan[t] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalOlapScan[t] +----PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalOlapScan[t] +--PhysicalOlapScan[t] -- !left_anti_join -- PhysicalResultSink @@ -43,18 +34,14 @@ PhysicalResultSink -- !right_semi_join -- PhysicalResultSink --NestedLoopJoin[RIGHT_SEMI_JOIN] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalOlapScan[t] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalOlapScan[t] +----PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !right_anti_join -- PhysicalResultSink --NestedLoopJoin[RIGHT_ANTI_JOIN] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalOlapScan[t] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalOlapScan[t] +----PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !inner_join -- 1 1 a 1 1 a diff --git a/regression-test/data/nereids_rules_p0/eliminate_not_null/eliminate_not_null.out b/regression-test/data/nereids_rules_p0/eliminate_not_null/eliminate_not_null.out index 9c3c820d4413a66..a238db46ceca869 100644 --- a/regression-test/data/nereids_rules_p0/eliminate_not_null/eliminate_not_null.out +++ b/regression-test/data/nereids_rules_p0/eliminate_not_null/eliminate_not_null.out @@ -1,66 +1,55 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !eliminate_not_null_basic_comparison -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter(( not score IS NULL) and (t.score > 13)) -------PhysicalOlapScan[t] +--filter(( not score IS NULL) and (t.score > 13)) +----PhysicalOlapScan[t] -- !eliminate_not_null_in_clause -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter(( not id IS NULL) and id IN (1, 2, 3)) -------PhysicalOlapScan[t] +--filter(( not id IS NULL) and id IN (1, 2, 3)) +----PhysicalOlapScan[t] -- !eliminate_not_null_not_equal -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter(( not (score = 13)) and ( not score IS NULL)) -------PhysicalOlapScan[t] +--filter(( not (score = 13)) and ( not score IS NULL)) +----PhysicalOlapScan[t] -- !eliminate_not_null_string_function -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter(( not name IS NULL) and (length(name) > 0)) -------PhysicalOlapScan[t] +--filter(( not name IS NULL) and (length(name) > 0)) +----PhysicalOlapScan[t] -- !eliminate_not_null_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------filter(( not score IS NULL) and (t.score > 0)) ---------PhysicalOlapScan[t] +--hashAgg[LOCAL] +----filter(( not score IS NULL) and (t.score > 0)) +------PhysicalOlapScan[t] -- !eliminate_not_null_between -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter(( not score IS NULL) and (t.score <= 10) and (t.score >= 1)) -------PhysicalOlapScan[t] +--filter(( not score IS NULL) and (t.score <= 10) and (t.score >= 1)) +----PhysicalOlapScan[t] -- !eliminate_not_null_math_function -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter(( not score IS NULL) and (abs(score) = 5)) -------PhysicalOlapScan[t] +--filter(( not score IS NULL) and (abs(score) = 5)) +----PhysicalOlapScan[t] -- !eliminate_not_null_complex_logic -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter(( not score IS NULL) and ((t.score > 5) OR (t.id < 10))) -------PhysicalOlapScan[t] +--filter(( not score IS NULL) and ((t.score > 5) OR (t.id < 10))) +----PhysicalOlapScan[t] -- !eliminate_not_null_date_function -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter(( not name IS NULL) and (year(cast(name as DATEV2)) = 2022)) -------PhysicalOlapScan[t] +--filter(( not name IS NULL) and (year(cast(name as DATEV2)) = 2022)) +----PhysicalOlapScan[t] -- !eliminate_not_null_with_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t.score = t.score)) otherCondition=() build RFs:RF0 score->[score] -------filter(( not score IS NULL) and (t.score > 0)) ---------PhysicalOlapScan[t] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((t.score > 0)) -----------PhysicalOlapScan[t] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t.score = t.score)) otherCondition=() +----filter(( not score IS NULL) and (t.score > 0)) +------PhysicalOlapScan[t] +----filter((t.score > 0)) +------PhysicalOlapScan[t] diff --git a/regression-test/data/nereids_rules_p0/eliminate_outer_join/eliminate_outer_join.out b/regression-test/data/nereids_rules_p0/eliminate_outer_join/eliminate_outer_join.out index 138eb0e62da5e9a..b714f8594f674bc 100644 --- a/regression-test/data/nereids_rules_p0/eliminate_outer_join/eliminate_outer_join.out +++ 
b/regression-test/data/nereids_rules_p0/eliminate_outer_join/eliminate_outer_join.out @@ -3,8 +3,8 @@ PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF0 +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t] --------filter((t2.score > 10)) ----------PhysicalOlapScan[t] @@ -12,8 +12,8 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF0 +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t] --------filter((t1.score > 10)) ----------PhysicalOlapScan[t] @@ -21,8 +21,8 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF0 +------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t] --------filter((t1.score > 10)) ----------PhysicalOlapScan[t] @@ -30,8 +30,8 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF0 +------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t] --------filter((t2.score > 10)) ----------PhysicalOlapScan[t] @@ -39,9 +39,9 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------filter((t1.score > 10)) -----------PhysicalOlapScan[t] apply RFs: RF0 +----------PhysicalOlapScan[t] --------filter((t2.score > 10)) ----------PhysicalOlapScan[t] @@ -49,9 +49,9 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------filter((t1.score > 10)) -----------PhysicalOlapScan[t] apply RFs: RF0 +----------PhysicalOlapScan[t] --------filter((t2.score > 10)) ----------PhysicalOlapScan[t] @@ -60,8 +60,8 @@ PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject ------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() ---------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalOlapScan[t] apply RFs: RF0 +--------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t] ----------filter((t1.score > 10)) ------------PhysicalOlapScan[t] --------PhysicalOlapScan[t] @@ -71,8 +71,8 @@ PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject ------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() ---------hashJoin[INNER_JOIN] 
hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalOlapScan[t] apply RFs: RF0 +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t] ----------filter((t2.score > 10)) ------------PhysicalOlapScan[t] --------PhysicalOlapScan[t] @@ -81,10 +81,10 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF1 ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalOlapScan[t] apply RFs: RF0 +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------PhysicalOlapScan[t] +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t] ----------filter((t1.score > 10)) ------------PhysicalOlapScan[t] @@ -92,10 +92,10 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF1 ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalOlapScan[t] apply RFs: RF0 +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------PhysicalOlapScan[t] +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t] ----------filter((t2.score > 10)) ------------PhysicalOlapScan[t] @@ -104,8 +104,8 @@ PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject ------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() ---------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalOlapScan[t] apply RFs: RF0 +--------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t] ----------filter((t1.score > 10)) ------------PhysicalOlapScan[t] --------PhysicalOlapScan[t] @@ -115,8 +115,8 @@ PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject ------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() ---------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalOlapScan[t] apply RFs: RF0 +--------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t] ----------filter((t2.score > 10)) ------------PhysicalOlapScan[t] --------PhysicalDistribute[DistributionSpecReplicated] @@ -126,8 +126,8 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF0 +------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t] --------filter(( not id IS NULL) and (t1.score > 5)) ----------PhysicalOlapScan[t] @@ -135,8 +135,8 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 
id->[id] ---------PhysicalOlapScan[t] apply RFs: RF0 +------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t] --------filter(( not id IS NULL) and (t2.score > 5)) ----------PhysicalOlapScan[t] @@ -153,9 +153,9 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalOlapScan[t] apply RFs: RF0 RF1 +------hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t] ----------filter((t1.score > 5)) ------------PhysicalOlapScan[t] --------filter(( not score IS NULL)) @@ -199,9 +199,9 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------filter(( not name IS NULL)) -----------PhysicalOlapScan[t] apply RFs: RF0 +----------PhysicalOlapScan[t] --------filter((t1.score > 10)) ----------PhysicalOlapScan[t] @@ -209,9 +209,9 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------filter(( not name IS NULL)) -----------PhysicalOlapScan[t] apply RFs: RF0 +----------PhysicalOlapScan[t] --------filter((t1.score > 10)) ----------PhysicalOlapScan[t] @@ -219,9 +219,9 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() --------filter(( not name IS NULL)) -----------PhysicalOlapScan[t] apply RFs: RF0 +----------PhysicalOlapScan[t] --------filter((t1.score > 10)) ----------PhysicalOlapScan[t] @@ -229,8 +229,8 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t1_alias.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF0 +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t1_alias.id)) otherCondition=() +--------PhysicalOlapScan[t] --------filter((t1_alias.name > '2023-01-01')) ----------PhysicalOlapScan[t] @@ -262,8 +262,8 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF0 +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t] --------PhysicalProject ----------filter((t2.score > 20)) ------------PhysicalOlapScan[t] @@ -296,8 +296,8 @@ PhysicalResultSink PhysicalResultSink --PhysicalDistribute[DistributionSpecGather] ----PhysicalProject -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF0 
+------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t] --------filter(( not name IS NULL)) ----------PhysicalOlapScan[t] diff --git a/regression-test/data/nereids_rules_p0/filter_push_down/push_down_filter_other_condition.out b/regression-test/data/nereids_rules_p0/filter_push_down/push_down_filter_other_condition.out index 9c5aa03f4dcdba7..affd081e0716a48 100644 --- a/regression-test/data/nereids_rules_p0/filter_push_down/push_down_filter_other_condition.out +++ b/regression-test/data/nereids_rules_p0/filter_push_down/push_down_filter_other_condition.out @@ -1,333 +1,253 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !pushdown_inner_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id > 1)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id > 1)) -----------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----filter((t2.id > 1)) +------PhysicalOlapScan[t2] -- !pushdown_left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id > 1)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id > 1)) -----------PhysicalOlapScan[t2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----filter((t2.id > 1)) +------PhysicalOlapScan[t2] -- !pushdown_right_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id > 1)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id > 1)) -----------PhysicalOlapScan[t2] +--hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----filter((t2.id > 1)) +------PhysicalOlapScan[t2] -- !pushdown_left_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id > 1)) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id > 1)) -----------PhysicalOlapScan[t2] +--hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id > 1)) +----PhysicalOlapScan[t1] +----filter((t2.id > 1)) +------PhysicalOlapScan[t2] -- !pushdown_right_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------PhysicalDistribute[DistributionSpecHash] ---------filter((t1.id > 1)) -----------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_full_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] 
-----hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id > 1)) -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id > 1)) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_left_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id > 1)) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id > 1)) -----------PhysicalOlapScan[t2] +--hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id > 1)) +----PhysicalOlapScan[t1] +----filter((t2.id > 1)) +------PhysicalOlapScan[t2] -- !pushdown_right_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[RIGHT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------PhysicalDistribute[DistributionSpecHash] ---------filter((t1.id > 1)) -----------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[RIGHT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_cross_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id > 1)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id > 1)) -----------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----filter((t2.id > 1)) +------PhysicalOlapScan[t2] -- !pushdown_inner_join_combined -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------filter((t1.id < 10) and (t1.id > 1)) -----------PhysicalOlapScan[t1] apply RFs: RF0 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t2.id < 10) and (t2.id > 1)) -------------PhysicalOlapScan[t2] apply RFs: RF1 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t3.id < 10) and (t3.id > 1)) -----------PhysicalOlapScan[t3] +--hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id < 10) and (t1.id > 1)) +--------PhysicalOlapScan[t1] +------filter((t2.id < 10) and (t2.id > 1)) +--------PhysicalOlapScan[t2] +----filter((t3.id < 10) and (t3.id > 1)) +------PhysicalOlapScan[t3] -- !pushdown_left_semi_join_combined -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------filter((t1.id < 10) and (t1.id > 1)) -----------PhysicalOlapScan[t1] apply RFs: RF0 RF1 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t2.id < 10) and (t2.id > 1)) -------------PhysicalOlapScan[t2] 
-------PhysicalDistribute[DistributionSpecHash] ---------filter((t3.id < 10) and (t3.id > 1)) -----------PhysicalOlapScan[t3] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id < 10) and (t1.id > 1)) +--------PhysicalOlapScan[t1] +------filter((t2.id < 10) and (t2.id > 1)) +--------PhysicalOlapScan[t2] +----filter((t3.id < 10) and (t3.id > 1)) +------PhysicalOlapScan[t3] -- !pushdown_right_semi_join_combined -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------filter((t1.id < 10) and (t1.id > 1)) -----------PhysicalOlapScan[t1] apply RFs: RF0 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t2.id < 10) and (t2.id > 1)) -------------PhysicalOlapScan[t2] apply RFs: RF1 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t3.id < 10) and (t3.id > 1)) -----------PhysicalOlapScan[t3] +--hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() +----hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id < 10) and (t1.id > 1)) +--------PhysicalOlapScan[t1] +------filter((t2.id < 10) and (t2.id > 1)) +--------PhysicalOlapScan[t2] +----filter((t3.id < 10) and (t3.id > 1)) +------PhysicalOlapScan[t3] -- !pushdown_left_outer_join_combined -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------filter((t1.id < 10) and (t1.id > 1)) -----------PhysicalOlapScan[t1] apply RFs: RF0 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t2.id < 10) and (t2.id > 1)) -------------PhysicalOlapScan[t2] apply RFs: RF1 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t3.id < 10) and (t3.id > 1)) -----------PhysicalOlapScan[t3] +--hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id < 10) and (t1.id > 1)) +--------PhysicalOlapScan[t1] +------filter((t2.id < 10) and (t2.id > 1)) +--------PhysicalOlapScan[t2] +----filter((t3.id < 10) and (t3.id > 1)) +------PhysicalOlapScan[t3] -- !pushdown_right_outer_join_combined -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t1.id < 10) and (t1.id > 1)) -------------PhysicalOlapScan[t1] apply RFs: RF0 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t2.id < 10)) -------------PhysicalOlapScan[t2] apply RFs: RF1 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t3.id < 10)) -----------PhysicalOlapScan[t3] +--hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() +----hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id < 10) and (t1.id > 1)) +--------PhysicalOlapScan[t1] +------filter((t2.id < 10)) 
+--------PhysicalOlapScan[t2] +----filter((t3.id < 10)) +------PhysicalOlapScan[t3] -- !pushdown_full_outer_join_combined -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t1.id < 10) and (t1.id > 1)) -------------PhysicalOlapScan[t1] apply RFs: RF0 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t2.id < 10)) -------------PhysicalOlapScan[t2] apply RFs: RF1 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t3.id < 10)) -----------PhysicalOlapScan[t3] +--hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() +----hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id < 10) and (t1.id > 1)) +--------PhysicalOlapScan[t1] +------filter((t2.id < 10)) +--------PhysicalOlapScan[t2] +----filter((t3.id < 10)) +------PhysicalOlapScan[t3] -- !pushdown_left_anti_join_combined -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF0 id->[id] -------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id > 1)) ---------filter((t1.id < 10)) -----------PhysicalOlapScan[t1] apply RFs: RF0 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t2.id < 10) and (t2.id > 1)) -------------PhysicalOlapScan[t2] -------PhysicalDistribute[DistributionSpecHash] ---------filter((t3.id < 10)) -----------PhysicalOlapScan[t3] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +----hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id > 1)) +------filter((t1.id < 10)) +--------PhysicalOlapScan[t1] +------filter((t2.id < 10) and (t2.id > 1)) +--------PhysicalOlapScan[t2] +----filter((t3.id < 10)) +------PhysicalOlapScan[t3] -- !pushdown_right_anti_join_combined -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[RIGHT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t1.id < 10) and (t1.id > 1)) -------------PhysicalOlapScan[t1] apply RFs: RF0 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t2.id < 10)) -------------PhysicalOlapScan[t2] apply RFs: RF1 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t3.id < 10)) -----------PhysicalOlapScan[t3] +--hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() +----hashJoin[RIGHT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id < 10) and (t1.id > 1)) +--------PhysicalOlapScan[t1] +------filter((t2.id < 10)) +--------PhysicalOlapScan[t2] +----filter((t3.id < 10)) +------PhysicalOlapScan[t3] -- !pushdown_cross_join_combined -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() build RFs:RF0 id->[id] -------PhysicalDistribute[DistributionSpecHash] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecReplicated] -------------filter((t2.id < 10)) 
---------------PhysicalOlapScan[t2] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t3.id < 10)) -----------PhysicalOlapScan[t3] +--hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() +----NestedLoopJoin[CROSS_JOIN] +------PhysicalOlapScan[t1] +------filter((t2.id < 10)) +--------PhysicalOlapScan[t2] +----filter((t3.id < 10)) +------PhysicalOlapScan[t3] -- !pushdown_null_aware_anti_join_combined -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[NULL_AWARE_LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((t2.id > 0)) -----------PhysicalOlapScan[t2] +--hashJoin[NULL_AWARE_LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----PhysicalOlapScan[t1] +----filter((t2.id > 0)) +------PhysicalOlapScan[t2] -- !pushdown_inner_join_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((expr_cast(id as BIGINT) = sum(id))) otherCondition=() build RFs:RF0 sum(id)->[id] -------PhysicalDistribute[DistributionSpecHash] ---------filter((cast(id as BIGINT) = 1) and (t1.id = 1)) -----------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((sum(id) = 1)) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((expr_cast(id as BIGINT) = sum(id))) otherCondition=() +----filter((cast(id as BIGINT) = 1) and (t1.id = 1)) +------PhysicalOlapScan[t1] +----filter((sum(id) = 1)) +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[t2] -- !pushdown_left_semi_join_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id > 1)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id > 1)) -----------PhysicalOlapScan[t2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----filter((t2.id > 1)) +------PhysicalOlapScan[t2] -- !pushdown_left_outer_join_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter(((cast(id as BIGINT) = sum(id)) OR id IS NULL)) -------NestedLoopJoin[LEFT_OUTER_JOIN](t1.id = 1) ---------PhysicalOlapScan[t1] ---------PhysicalDistribute[DistributionSpecReplicated] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[t2] +--filter(((cast(id as BIGINT) = sum(id)) OR id IS NULL)) +----NestedLoopJoin[LEFT_OUTER_JOIN](t1.id = 1) +------PhysicalOlapScan[t1] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[t2] -- !pushdown_left_anti_join_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----NestedLoopJoin[LEFT_ANTI_JOIN](((t1.id = t2.id) OR id IS NULL) OR id IS NULL)(t1.id > 1) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t2] +--NestedLoopJoin[LEFT_ANTI_JOIN](((t1.id = t2.id) OR id IS NULL) OR id IS NULL)(t1.id > 1) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_cross_subquery -- PhysicalResultSink 
---PhysicalDistribute[DistributionSpecGather] -----NestedLoopJoin[LEFT_SEMI_JOIN] -------filter((t1.id > 1)) ---------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t2] +--NestedLoopJoin[LEFT_SEMI_JOIN] +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_inner_join_subquery_outer -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id > 1)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id > 1)) -----------PhysicalAssertNumRows -------------PhysicalDistribute[DistributionSpecGather] ---------------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----filter((t2.id > 1)) +------PhysicalAssertNumRows +--------PhysicalOlapScan[t2] -- !pushdown_left_semi_join_subquery_outer -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id > 1)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id > 1)) -----------PhysicalOlapScan[t2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----filter((t2.id > 1)) +------PhysicalOlapScan[t2] -- !pushdown_left_outer_join_subquery_outer -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----NestedLoopJoin[INNER_JOIN]((t1.id = t2.id) OR (id IS NULL AND (t1.id > 1))) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalAssertNumRows -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalOlapScan[t2] +--NestedLoopJoin[INNER_JOIN]((t1.id = t2.id) OR (id IS NULL AND (t1.id > 1))) +----PhysicalOlapScan[t1] +----PhysicalAssertNumRows +------PhysicalOlapScan[t2] -- !pushdown_left_anti_join_subquery_outer -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[NULL_AWARE_LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------filter((t1.id > 1)) ---------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((t2.id > 1)) -----------PhysicalOlapScan[t2] +--hashJoin[NULL_AWARE_LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----filter((t2.id > 1)) +------PhysicalOlapScan[t2] -- !pushdown_cross_join_subquery_outer -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----NestedLoopJoin[CROSS_JOIN] -------filter((t1.id > 1)) ---------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalLimit[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t2] +--NestedLoopJoin[CROSS_JOIN] +----filter((t1.id > 1)) +------PhysicalOlapScan[t1] +----PhysicalLimit[GLOBAL] +------PhysicalLimit[LOCAL] +--------PhysicalOlapScan[t2] diff --git a/regression-test/data/nereids_rules_p0/filter_push_down/push_filter_inside_join.out b/regression-test/data/nereids_rules_p0/filter_push_down/push_filter_inside_join.out index 5acf85a076efd51..355b4b231fc7af1 100644 --- 
a/regression-test/data/nereids_rules_p0/filter_push_down/push_filter_inside_join.out +++ b/regression-test/data/nereids_rules_p0/filter_push_down/push_filter_inside_join.out @@ -1,140 +1,103 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !pushdown_cross_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----NestedLoopJoin[INNER_JOIN](t1.msg > t2.msg) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t2] +--NestedLoopJoin[INNER_JOIN](t1.msg > t2.msg) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_cross_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.msg = t2.msg)) otherCondition=() -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.msg = t2.msg)) otherCondition=() +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_inner_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.msg > t2.msg)) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.msg > t2.msg)) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_left_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.msg > t2.msg)) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.msg > t2.msg)) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_right_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t2.msg < t1.msg)) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t2.msg < t1.msg)) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_full_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.msg < t2.msg)) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.msg < t2.msg)) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_cross_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----NestedLoopJoin[INNER_JOIN](t1.msg < t2.msg) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t2] +--NestedLoopJoin[INNER_JOIN](t1.msg < t2.msg) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_inner_join_hash -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=() -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = 
t2.msg)) otherCondition=() +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_left_join_hash -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=() -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=() +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_right_join_hash -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t2.msg = t1.msg)) otherCondition=() -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t2.msg = t1.msg)) otherCondition=() +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_full_join_hash -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=() -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=() +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_cross_join_combine -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.msg = t2.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.msg = t2.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_inner_join_combine -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_left_join_combine -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_right_join_combine -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t2.msg = t1.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] 
+--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t2.msg = t1.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_full_join_combine -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.msg = t2.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !pushdown_cross_join_combine -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.msg = t2.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.msg = t2.msg)) otherCondition=(((cast(msg as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] diff --git a/regression-test/data/nereids_rules_p0/filter_push_down/push_filter_through.out b/regression-test/data/nereids_rules_p0/filter_push_down/push_filter_through.out index da3ef8a7a3890eb..82e2624942eb041 100644 --- a/regression-test/data/nereids_rules_p0/filter_push_down/push_filter_through.out +++ b/regression-test/data/nereids_rules_p0/filter_push_down/push_filter_through.out @@ -1,8 +1,9 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !filter_project_alias -- PhysicalResultSink ---filter((t1.id = 1)) -----PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----filter((t1.id = 1)) +------PhysicalOlapScan[t1] -- !filter_project_constant -- PhysicalResultSink @@ -10,397 +11,499 @@ PhysicalResultSink -- !filter_project_arithmetic -- PhysicalResultSink ---filter((cast(id as BIGINT) = 1)) -----PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----filter((cast(id as BIGINT) = 1)) +------PhysicalOlapScan[t1] -- !filter_order_by -- PhysicalResultSink ---filter((t.id = 1)) -----PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----filter((t.id = 1)) +------PhysicalOlapScan[t1] -- !filter_order_by_limit -- PhysicalResultSink --filter((t.id = 1)) ----PhysicalTopN[MERGE_SORT] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecGather] +--------PhysicalTopN[LOCAL_SORT] +----------PhysicalOlapScan[t1] -- !filter_order_by_constant -- PhysicalResultSink ---filter((t.id = 1)) -----PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----filter((t.id = 1)) +------PhysicalOlapScan[t1] -- !filter_join_inner -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] apply RFs: RF0 -----filter((t2.id = 1)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1)) +----------PhysicalOlapScan[t2] -- !filter_join_inner -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] apply RFs: RF0 -----filter((t2.id = 1)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1)) +----------PhysicalOlapScan[t2] -- !filter_join_inner -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----filter((t1.msg = '')) -------PhysicalOlapScan[t1] -----PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.msg = '')) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------PhysicalOlapScan[t2] -- !filter_join_inner -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----filter((t1.id = 1) and (t1.id = 2)) -------PhysicalOlapScan[t1] apply RFs: RF0 -----filter((t2.id = 1) and (t2.id = 2)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1) and (t1.id = 2)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1) and (t2.id = 2)) +----------PhysicalOlapScan[t2] -- !filter_join_inner -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=(((t1.id = 1) OR (t2.id = 2))) 
-----PhysicalOlapScan[t1] -----PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=(((t1.id = 1) OR (t2.id = 2))) +------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------PhysicalOlapScan[t2] -- !filter_join_left -- PhysicalResultSink ---hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] -----filter((t2.id = 1)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1)) +----------PhysicalOlapScan[t2] -- !filter_join_left -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----filter((t1.id = 1) and (t1.id = 2)) -------PhysicalOlapScan[t1] apply RFs: RF0 -----filter((t2.id = 1) and (t2.id = 2)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1) and (t1.id = 2)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1) and (t2.id = 2)) +----------PhysicalOlapScan[t2] -- !filter_join_left -- PhysicalResultSink ---filter(((t1.id = 1) OR (t2.id = 2))) -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------PhysicalOlapScan[t1] -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----filter(((t1.id = 1) OR (t2.id = 2))) +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalDistribute[DistributionSpecHash] +----------PhysicalOlapScan[t2] -- !filter_join_right -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] apply RFs: RF0 -----filter((t2.id = 1)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1)) +----------PhysicalOlapScan[t2] -- !filter_join_full -- PhysicalResultSink ---hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] -----filter((t2.id = 1)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1)) +----------PhysicalOlapScan[t2] -- !filter_join_left -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----filter((t1.id = 1) and (t1.id = 2)) -------PhysicalOlapScan[t1] apply RFs: RF0 -----filter((t2.id = 1) and (t2.id = 2)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1) and (t1.id = 2)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] 
+--------filter((t2.id = 1) and (t2.id = 2)) +----------PhysicalOlapScan[t2] -- !filter_join_left -- PhysicalResultSink ---filter(((t1.id = 1) OR (t2.id = 2))) -----hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------PhysicalOlapScan[t1] -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----filter(((t1.id = 1) OR (t2.id = 2))) +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalDistribute[DistributionSpecHash] +----------PhysicalOlapScan[t1] +--------PhysicalDistribute[DistributionSpecHash] +----------PhysicalOlapScan[t2] -- !filter_join_cross -- PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] -----PhysicalDistribute[DistributionSpecReplicated] -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----NestedLoopJoin[CROSS_JOIN] +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecReplicated] +--------PhysicalOlapScan[t2] -- !filter_join_left_anti -- PhysicalResultSink ---hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] -----filter((t2.id = 1)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1)) +----------PhysicalOlapScan[t2] -- !filter_join_left_semi -- PhysicalResultSink ---hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] apply RFs: RF0 -----filter((t2.id = 1)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1)) +----------PhysicalOlapScan[t2] -- !filter_join_right_anti -- PhysicalResultSink ---hashJoin[RIGHT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] apply RFs: RF0 -----filter((t2.id = 1)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[RIGHT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------PhysicalDistribute[DistributionSpecHash] +--------filter((t1.id = 1)) +----------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1)) +----------PhysicalOlapScan[t2] -- !filter_join_right_semi -- PhysicalResultSink ---hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] apply RFs: RF0 -----filter((t2.id = 1)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1)) +----------PhysicalOlapScan[t2] -- !filter_join_right_semi -- PhysicalResultSink ---hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----filter((t1.id = 1)) -------PhysicalOlapScan[t1] apply RFs: RF0 
-----filter((t2.id = 1)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 1)) +----------PhysicalOlapScan[t2] -- !filter_multi_inner -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id = 1)) ---------PhysicalOlapScan[t1] apply RFs: RF0 RF1 -------filter((t2.id = 1)) ---------PhysicalOlapScan[t2] -----filter((t3.id = 1)) -------PhysicalOlapScan[t3] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((t1.id = 1)) +----------PhysicalOlapScan[t1] +--------PhysicalDistribute[DistributionSpecHash] +----------filter((t2.id = 1)) +------------PhysicalOlapScan[t2] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t3.id = 1)) +----------PhysicalOlapScan[t3] -- !filter_mixed_inner_left -- PhysicalResultSink ---filter((((t1.id = 1) AND (t2.id = 2)) OR (t3.id = 2))) -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------PhysicalOlapScan[t1] ---------PhysicalOlapScan[t2] -------PhysicalOlapScan[t3] +--PhysicalDistribute[DistributionSpecGather] +----filter((((t1.id = 1) AND (t2.id = 2)) OR (t3.id = 2))) +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalDistribute[DistributionSpecHash] +------------PhysicalOlapScan[t2] +--------PhysicalDistribute[DistributionSpecHash] +----------PhysicalOlapScan[t3] -- !filter_multi_left -- PhysicalResultSink ---filter((((t1.id = 1) AND (t2.id > 1)) OR (t3.id < 4))) -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------PhysicalOlapScan[t1] ---------PhysicalOlapScan[t2] -------PhysicalOlapScan[t3] +--PhysicalDistribute[DistributionSpecGather] +----filter((((t1.id = 1) AND (t2.id > 1)) OR (t3.id < 4))) +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalDistribute[DistributionSpecHash] +------------PhysicalOlapScan[t2] +--------PhysicalDistribute[DistributionSpecHash] +----------PhysicalOlapScan[t3] -- !filter_multi_outer -- PhysicalResultSink ---hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------filter((t1.id = 1)) ---------PhysicalOlapScan[t1] -------filter((t2.id = 1)) ---------PhysicalOlapScan[t2] -----filter((t3.id = 1)) -------PhysicalOlapScan[t3] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((t1.id = 1)) +----------PhysicalOlapScan[t1] 
+--------PhysicalDistribute[DistributionSpecHash] +----------filter((t2.id = 1)) +------------PhysicalOlapScan[t2] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t3.id = 1)) +----------PhysicalOlapScan[t3] -- !filter_multi_cross -- PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] +--PhysicalDistribute[DistributionSpecGather] ----NestedLoopJoin[CROSS_JOIN] -------filter((t1.id = 1)) ---------PhysicalOlapScan[t1] +------NestedLoopJoin[CROSS_JOIN] +--------filter((t1.id = 1)) +----------PhysicalOlapScan[t1] +--------PhysicalDistribute[DistributionSpecReplicated] +----------PhysicalOlapScan[t2] ------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t2] -----PhysicalDistribute[DistributionSpecReplicated] -------PhysicalOlapScan[t3] +--------PhysicalOlapScan[t3] -- !filter_multi_mixed -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t4.id)) otherCondition=() build RFs:RF1 id->[id] -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------filter((t1.id = 1)) -----------PhysicalOlapScan[t1] apply RFs: RF0 RF1 ---------filter((t2.id = 1)) -----------PhysicalOlapScan[t2] -------filter((t3.id = 1)) ---------PhysicalOlapScan[t3] -----filter((t4.id = 1)) -------PhysicalOlapScan[t4] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t4.id)) otherCondition=() +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------filter((t1.id = 1)) +------------PhysicalOlapScan[t1] +----------PhysicalDistribute[DistributionSpecHash] +------------filter((t2.id = 1)) +--------------PhysicalOlapScan[t2] +--------PhysicalDistribute[DistributionSpecHash] +----------filter((t3.id = 1)) +------------PhysicalOlapScan[t3] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t4.id = 1)) +----------PhysicalOlapScan[t4] -- !filter_aggregation_filtered_agg_func -- PhysicalResultSink ---filter((count(*) > 10)) -----hashAgg[GLOBAL] -------hashAgg[LOCAL] ---------PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----filter((count(*) > 10)) +------hashAgg[GLOBAL] +--------PhysicalDistribute[DistributionSpecHash] +----------hashAgg[LOCAL] +------------PhysicalOlapScan[t1] -- !filter_aggregation_group_set -- PhysicalResultSink ---filter((cast(msg as DOUBLE) = 1.0)) -----hashAgg[GLOBAL] -------hashAgg[LOCAL] ---------PhysicalRepeat -----------filter((t1.id > 10)) -------------PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----filter((cast(msg as DOUBLE) = 1.0)) +------hashAgg[GLOBAL] +--------PhysicalDistribute[DistributionSpecHash] +----------hashAgg[LOCAL] +------------PhysicalRepeat +--------------filter((t1.id > 10)) +----------------PhysicalOlapScan[t1] -- !filter_aggregation_group_set -- PhysicalResultSink ---filter(((t1.id > 10) OR (cast(msg as DOUBLE) = 1.0))) -----hashAgg[GLOBAL] -------hashAgg[LOCAL] ---------PhysicalRepeat -----------PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----filter(((t1.id > 10) OR (cast(msg as DOUBLE) = 1.0))) +------hashAgg[GLOBAL] +--------PhysicalDistribute[DistributionSpecHash] +----------hashAgg[LOCAL] +------------PhysicalRepeat +--------------PhysicalOlapScan[t1] -- !filter_aggregation_filtered_key -- PhysicalResultSink ---hashAgg[LOCAL] -----filter((t1.id > 10)) 
-------PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----hashAgg[LOCAL] +------filter((t1.id > 10)) +--------PhysicalOlapScan[t1] -- !filter_aggregation_filtered_part_key -- PhysicalResultSink ---hashAgg[LOCAL] -----filter((t1.id > 10)) -------PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----hashAgg[LOCAL] +------filter((t1.id > 10)) +--------PhysicalOlapScan[t1] -- !filter_aggregation_filtered_part_key -- -- !filter_aggregation_filtered_part_key -- PhysicalResultSink ---filter((t.c > 10)) -----hashAgg[LOCAL] -------PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----filter((t.c > 10)) +------hashAgg[LOCAL] +--------PhysicalOlapScan[t1] -- !push_filter_union -- PhysicalResultSink ---hashAgg[GLOBAL] -----hashAgg[LOCAL] -------PhysicalUnion ---------PhysicalDistribute[DistributionSpecExecutionAny] -----------filter((t1.id = 2)) -------------PhysicalOlapScan[t1] ---------PhysicalDistribute[DistributionSpecExecutionAny] -----------filter((t2.id = 2)) -------------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashAgg[GLOBAL] +------PhysicalDistribute[DistributionSpecHash] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalDistribute[DistributionSpecExecutionAny] +--------------filter((t1.id = 2)) +----------------PhysicalOlapScan[t1] +------------PhysicalDistribute[DistributionSpecExecutionAny] +--------------filter((t2.id = 2)) +----------------PhysicalOlapScan[t2] -- !push_filter_union_all -- PhysicalResultSink ---PhysicalUnion -----PhysicalDistribute[DistributionSpecExecutionAny] -------filter((t1.id = 2)) ---------PhysicalOlapScan[t1] -----PhysicalDistribute[DistributionSpecExecutionAny] -------filter((t2.id = 2)) ---------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalUnion +------PhysicalDistribute[DistributionSpecExecutionAny] +--------filter((t1.id = 2)) +----------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecExecutionAny] +--------filter((t2.id = 2)) +----------PhysicalOlapScan[t2] -- !push_filter_intersect -- PhysicalResultSink ---PhysicalIntersect -----filter((t1.id = 2)) -------PhysicalOlapScan[t1] -----filter((t2.id = 2)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalIntersect +------PhysicalDistribute[DistributionSpecHash] +--------filter((t1.id = 2)) +----------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 2)) +----------PhysicalOlapScan[t2] -- !push_filter_except -- PhysicalResultSink ---PhysicalExcept -----filter((t1.id = 2)) -------PhysicalOlapScan[t1] -----filter((t2.id = 2)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalExcept +------PhysicalDistribute[DistributionSpecHash] +--------filter((t1.id = 2)) +----------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 2)) +----------PhysicalOlapScan[t2] -- !push_filter_union -- PhysicalResultSink ---hashAgg[GLOBAL] -----hashAgg[LOCAL] -------PhysicalUnion ---------PhysicalDistribute[DistributionSpecExecutionAny] -----------filter((cast(random() as INT) = 2)) -------------PhysicalOneRowRelation ---------PhysicalDistribute[DistributionSpecExecutionAny] -----------filter((t2.id = 2)) -------------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----hashAgg[GLOBAL] +------PhysicalDistribute[DistributionSpecHash] +--------hashAgg[LOCAL] +----------PhysicalUnion 
+------------PhysicalDistribute[DistributionSpecExecutionAny] +--------------filter((cast(random() as INT) = 2)) +----------------PhysicalOneRowRelation +------------PhysicalDistribute[DistributionSpecExecutionAny] +--------------filter((t2.id = 2)) +----------------PhysicalOlapScan[t2] -- !push_filter_union_all -- PhysicalResultSink ---PhysicalUnion -----PhysicalDistribute[DistributionSpecExecutionAny] -------filter(((cast(random() as INT) = 2) OR (cast(random() as INT) = 3))) ---------PhysicalOneRowRelation -----PhysicalDistribute[DistributionSpecExecutionAny] -------filter(id IN (2, 3)) ---------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalUnion +------PhysicalDistribute[DistributionSpecExecutionAny] +--------filter(((cast(random() as INT) = 2) OR (cast(random() as INT) = 3))) +----------PhysicalOneRowRelation +------PhysicalDistribute[DistributionSpecExecutionAny] +--------filter(id IN (2, 3)) +----------PhysicalOlapScan[t2] -- !push_filter_intersect -- PhysicalResultSink ---PhysicalIntersect -----filter(((cast(random() as INT) = 2) OR (cast(random() as INT) = 3))) -------PhysicalOneRowRelation -----filter(id IN (2, 3)) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalIntersect +------PhysicalDistribute[DistributionSpecHash] +--------filter(((cast(random() as INT) = 2) OR (cast(random() as INT) = 3))) +----------PhysicalOneRowRelation +------PhysicalDistribute[DistributionSpecHash] +--------filter(id IN (2, 3)) +----------PhysicalOlapScan[t2] -- !push_filter_except -- PhysicalResultSink ---PhysicalExcept -----filter((cast(random() as INT) = 2) and (t1.msg = '')) -------PhysicalOlapScan[t1] -----filter((t2.id = 2) and (t2.msg = '')) -------PhysicalOlapScan[t2] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalExcept +------PhysicalDistribute[DistributionSpecHash] +--------filter((cast(random() as INT) = 2) and (t1.msg = '')) +----------PhysicalOlapScan[t1] +------PhysicalDistribute[DistributionSpecHash] +--------filter((t2.id = 2) and (t2.msg = '')) +----------PhysicalOlapScan[t2] -- !push_filter_except -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t3.id = t.id)) otherCondition=() -----PhysicalExcept -------filter((t1.id = 2)) ---------PhysicalOlapScan[t1] -------filter((t2.id = 2)) ---------PhysicalOlapScan[t2] -----PhysicalOlapScan[t3] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t3.id = t.id)) otherCondition=() +------PhysicalDistribute[DistributionSpecHash] +--------PhysicalExcept +----------PhysicalDistribute[DistributionSpecHash] +------------filter((t1.id = 2)) +--------------PhysicalOlapScan[t1] +----------PhysicalDistribute[DistributionSpecHash] +------------filter((t2.id = 2)) +--------------PhysicalOlapScan[t2] +------PhysicalDistribute[DistributionSpecHash] +--------PhysicalOlapScan[t3] -- !push_filter_subquery -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t3.id = t.id)) otherCondition=() -----PhysicalExcept -------filter((t1.id = 2)) ---------PhysicalOlapScan[t1] -------filter((t2.id = 2)) ---------PhysicalOlapScan[t2] -----PhysicalOlapScan[t3] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((t3.id = t.id)) otherCondition=() +------PhysicalDistribute[DistributionSpecHash] +--------PhysicalExcept +----------PhysicalDistribute[DistributionSpecHash] +------------filter((t1.id = 2)) +--------------PhysicalOlapScan[t1] +----------PhysicalDistribute[DistributionSpecHash] 
+------------filter((t2.id = 2)) +--------------PhysicalOlapScan[t2] +------PhysicalDistribute[DistributionSpecHash] +--------PhysicalOlapScan[t3] -- !filter_window_row_number -- PhysicalResultSink ---PhysicalWindow -----PhysicalQuickSort[LOCAL_SORT] -------filter((t1.id <= 5)) ---------PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalWindow +------PhysicalQuickSort[LOCAL_SORT] +--------filter((t1.id <= 5)) +----------PhysicalOlapScan[t1] -- !filter_window_order_row_number -- PhysicalResultSink ---PhysicalWindow -----PhysicalQuickSort[LOCAL_SORT] -------filter((t1.id <= 5)) ---------PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalWindow +------PhysicalQuickSort[LOCAL_SORT] +--------filter((t1.id <= 5)) +----------PhysicalOlapScan[t1] -- !filter_window_row_number_complex_predicate -- PhysicalResultSink ---PhysicalWindow -----PhysicalQuickSort[LOCAL_SORT] -------filter(((cast(id as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) ---------PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalWindow +------PhysicalQuickSort[LOCAL_SORT] +--------PhysicalDistribute[DistributionSpecHash] +----------filter(((cast(id as DOUBLE) + cast(msg as DOUBLE)) = cast('' as DOUBLE))) +------------PhysicalOlapScan[t1] -- !filter_multi_window -- PhysicalResultSink ---PhysicalWindow -----PhysicalQuickSort[LOCAL_SORT] -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------filter(((t1.msg = '') OR (t1.id = 2))) -------------PhysicalOlapScan[t1] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalWindow +------PhysicalQuickSort[LOCAL_SORT] +--------PhysicalDistribute[DistributionSpecHash] +----------PhysicalWindow +------------PhysicalQuickSort[LOCAL_SORT] +--------------PhysicalDistribute[DistributionSpecHash] +----------------filter(((t1.msg = '') OR (t1.id = 2))) +------------------PhysicalOlapScan[t1] diff --git a/regression-test/data/nereids_rules_p0/limit_push_down/limit_push_down.out b/regression-test/data/nereids_rules_p0/limit_push_down/limit_push_down.out index 4f845e5b4064cef..31ffeee07dd2042 100644 --- a/regression-test/data/nereids_rules_p0/limit_push_down/limit_push_down.out +++ b/regression-test/data/nereids_rules_p0/limit_push_down/limit_push_down.out @@ -2,228 +2,185 @@ -- !limit_project -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------PhysicalOlapScan[t1] -- !limit_offset_project -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------PhysicalOlapScan[t1] -- !limit_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_semi_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] 
-----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !right_semi_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !left_anti_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !right_anti_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[RIGHT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[RIGHT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !full_outer_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !left_outer_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalLimit[LOCAL] -------------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !right_outer_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t1] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = 
t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t2] -- !cross_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalLimit[LOCAL] -------------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecReplicated] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------NestedLoopJoin[CROSS_JOIN] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t1] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t2] -- !limit_offset_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_distinct -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[LOCAL] -----------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------hashAgg[LOCAL] +--------PhysicalOlapScan[t1] -- !limit_offset_agg -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[LOCAL] +--------PhysicalOlapScan[t1] -- !limit_distinct -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[t1] -------------PhysicalDistribute[DistributionSpecReplicated] ---------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashAgg[LOCAL] +--------NestedLoopJoin[CROSS_JOIN] +----------PhysicalLimit[LOCAL] +------------hashAgg[LOCAL] +--------------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] -- !limit_distinct -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[t1] -------------PhysicalDistribute[DistributionSpecReplicated] ---------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashAgg[LOCAL] +--------NestedLoopJoin[CROSS_JOIN] +----------PhysicalLimit[LOCAL] +------------hashAgg[LOCAL] +--------------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] -- !limit_distinct -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalLimit[LOCAL] ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[t1] -------------PhysicalDistribute[DistributionSpecHash] ---------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashAgg[LOCAL] +--------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() 
+----------PhysicalLimit[LOCAL] +------------hashAgg[LOCAL] +--------------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] -- !limit_distinct -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[LOCAL] -----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalLimit[LOCAL] ---------------hashAgg[LOCAL] -----------------PhysicalOlapScan[t1] -------------PhysicalDistribute[DistributionSpecHash] ---------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashAgg[LOCAL] +--------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalLimit[LOCAL] +------------hashAgg[LOCAL] +--------------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] -- !limit_offset_agg -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[LOCAL] +--------PhysicalOlapScan[t1] -- !limit_set_operation -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------PhysicalOlapScan[t1] +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------PhysicalOlapScan[t2] -- !limit_offset_set_operation -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalIntersect -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------PhysicalIntersect +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_window -- PhysicalResultSink @@ -231,10 +188,9 @@ PhysicalResultSink ----PhysicalLimit[LOCAL] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalPartitionTopN -----------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalPartitionTopN +--------------PhysicalOlapScan[t1] -- !limit_offset_window -- PhysicalResultSink @@ -242,71 +198,61 @@ PhysicalResultSink ----PhysicalLimit[LOCAL] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalPartitionTopN -----------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalPartitionTopN +--------------PhysicalOlapScan[t1] -- !limit_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] 
-------PhysicalLimit[LOCAL] ---------filter((t1.id = 1)) -----------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] -- !limit_offset_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------filter((t1.id = 1)) -----------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] -- !limit_project_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------filter((t1.id > 100)) -----------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------filter((t1.id > 100)) +--------PhysicalOlapScan[t1] -- !limit_join_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------filter((t1.id > 100)) -------------PhysicalOlapScan[t1] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecHash] -------------filter((t2.id > 100)) ---------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((t1.id > 100)) +----------PhysicalOlapScan[t1] +--------filter((t2.id > 100)) +----------PhysicalOlapScan[t2] -- !limit_subquery -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------PhysicalOlapScan[t1] -- !limit_subquery_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------filter((t1.id > 100)) -----------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------filter((t1.id > 100)) +--------PhysicalOlapScan[t1] -- !limit_subquery_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_subquery_window -- PhysicalResultSink @@ -314,208 +260,164 @@ PhysicalResultSink ----PhysicalLimit[LOCAL] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalPartitionTopN -----------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalPartitionTopN +--------------PhysicalOlapScan[t1] -- !limit_nested_subquery -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------PhysicalOlapScan[t1] -- !limit_union_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] 
---------------------hashAgg[LOCAL] -----------------------filter((t1.id > 100)) -------------------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------filter((t2.id > 100)) -------------------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------filter((t1.id > 100)) +------------------PhysicalOlapScan[t1] +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------filter((t2.id > 100)) +------------------PhysicalOlapScan[t2] -- !limit_union_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalOlapScan[t1] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalOlapScan[t2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] +----PhysicalLimit[LOCAL] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalLimit[LOCAL] +--------------hashAgg[GLOBAL] +----------------hashAgg[LOCAL] +------------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------------------PhysicalOlapScan[t1] +--------------------PhysicalOlapScan[t2] +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t3.id = t4.id)) otherCondition=() ------------------PhysicalLimit[LOCAL] --------------------hashAgg[LOCAL] -----------------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t3.id = t4.id)) otherCondition=() -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalOlapScan[t3] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------PhysicalOlapScan[t4] +----------------------PhysicalOlapScan[t3] +------------------PhysicalOlapScan[t4] -- !limit_union_window -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[GLOBAL] -----------hashAgg[LOCAL] -------------PhysicalUnion ---------------PhysicalLimit[LOCAL] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalWindow -------------------------PhysicalQuickSort[MERGE_SORT] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------PhysicalQuickSort[LOCAL_SORT] -------------------------------PhysicalOlapScan[t1] ---------------PhysicalLimit[LOCAL] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalWindow 
-------------------------PhysicalQuickSort[MERGE_SORT] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------PhysicalQuickSort[LOCAL_SORT] -------------------------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalLimit[LOCAL] +--------------hashAgg[GLOBAL] +----------------hashAgg[LOCAL] +------------------PhysicalWindow +--------------------PhysicalQuickSort[MERGE_SORT] +----------------------PhysicalQuickSort[LOCAL_SORT] +------------------------PhysicalOlapScan[t1] +------------PhysicalLimit[LOCAL] +--------------hashAgg[GLOBAL] +----------------hashAgg[LOCAL] +------------------PhysicalWindow +--------------------PhysicalQuickSort[MERGE_SORT] +----------------------PhysicalQuickSort[LOCAL_SORT] +------------------------PhysicalOlapScan[t2] -- !limit_subquery_join_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------filter((t1.id > 100)) -------------PhysicalOlapScan[t1] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecHash] -------------filter((t2.id > 100)) ---------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((t1.id > 100)) +----------PhysicalOlapScan[t1] +--------filter((t2.id > 100)) +----------PhysicalOlapScan[t2] -- !limit_subquery_join_window -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalWindow -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalPartitionTopN ---------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------PhysicalWindow +--------PhysicalQuickSort[LOCAL_SORT] +----------PhysicalPartitionTopN +------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------------PhysicalOlapScan[t1] +--------------PhysicalOlapScan[t2] -- !limit_subquery_union_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------filter((t1.id > 100)) -------------------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------filter((t2.id > 100)) -------------------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------filter((t1.id > 100)) +------------------PhysicalOlapScan[t1] +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------filter((t2.id > 100)) +------------------PhysicalOlapScan[t2] -- !limit_subquery_union_join -- PhysicalResultSink 
--PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------------------PhysicalOlapScan[t1] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------PhysicalOlapScan[t2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------hashJoin[INNER_JOIN] hashCondition=((t3.id = t4.id)) otherCondition=() build RFs:RF1 id->[id] -------------------------PhysicalOlapScan[t3] apply RFs: RF1 -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------PhysicalOlapScan[t4] +----PhysicalLimit[LOCAL] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------------------PhysicalOlapScan[t1] +------------------PhysicalOlapScan[t2] +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------hashJoin[INNER_JOIN] hashCondition=((t3.id = t4.id)) otherCondition=() +------------------PhysicalOlapScan[t3] +------------------PhysicalOlapScan[t4] -- !limit_subquery_union_window -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalWindow -----------PhysicalPartitionTopN -------------PhysicalDistribute[DistributionSpecHash] ---------------PhysicalPartitionTopN -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalUnion -------------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------------PhysicalOlapScan[t1] -------------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------PhysicalWindow +--------PhysicalPartitionTopN +----------PhysicalPartitionTopN +------------hashAgg[GLOBAL] +--------------hashAgg[LOCAL] +----------------PhysicalUnion +------------------PhysicalOlapScan[t1] +------------------PhysicalOlapScan[t2] -- !limit_correlated_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t2.id = t1.id)) otherCondition=() -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t2.id = t1.id)) otherCondition=() +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !limit_correlated_subquery_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t3.id = t1.id)) otherCondition=() -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------PhysicalOlapScan[t1] ---------PhysicalDistribute[DistributionSpecHash] -----------PhysicalOlapScan[t2] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t3] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t3.id = t1.id)) 
otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------PhysicalOlapScan[t1] +------PhysicalOlapScan[t2] +----PhysicalOlapScan[t3] -- !limit_correlated_subquery_window -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalWindow -------PhysicalQuickSort[LOCAL_SORT] ---------hashJoin[LEFT_SEMI_JOIN] hashCondition=((t2.id = t1.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +--PhysicalWindow +----PhysicalQuickSort[LOCAL_SORT] +------hashJoin[LEFT_SEMI_JOIN] hashCondition=((t2.id = t1.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_cte_query -- PhysicalCteAnchor ( cteId=CTEId#0 ) @@ -524,10 +426,8 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) ------PhysicalOlapScan[t1] --PhysicalResultSink ----PhysicalLimit[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalLimit[LOCAL] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalCteConsumer ( cteId=CTEId#0 ) +------PhysicalLimit[LOCAL] +--------PhysicalCteConsumer ( cteId=CTEId#0 ) -- !limit_cte_query_join -- PhysicalCteAnchor ( cteId=CTEId#0 ) @@ -540,13 +440,10 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) --------PhysicalOlapScan[t2] ----PhysicalResultSink ------PhysicalLimit[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalLimit[LOCAL] -------------hashJoin[FULL_OUTER_JOIN] hashCondition=((cte1.id = cte2.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalCteConsumer ( cteId=CTEId#1 ) +--------PhysicalLimit[LOCAL] +----------hashJoin[FULL_OUTER_JOIN] hashCondition=((cte1.id = cte2.id)) otherCondition=() +------------PhysicalCteConsumer ( cteId=CTEId#0 ) +------------PhysicalCteConsumer ( cteId=CTEId#1 ) -- !limit_cte_query_window -- PhysicalCteAnchor ( cteId=CTEId#0 ) @@ -558,56 +455,47 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) ------PhysicalLimit[LOCAL] --------PhysicalWindow ----------PhysicalQuickSort[MERGE_SORT] -------------PhysicalDistribute[DistributionSpecGather] ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalPartitionTopN -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalCteConsumer ( cteId=CTEId#0 ) +------------PhysicalQuickSort[LOCAL_SORT] +--------------PhysicalPartitionTopN +----------------PhysicalCteConsumer ( cteId=CTEId#0 ) -- !limit_project_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------filter((t1.id > 100)) -----------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------filter((t1.id > 100)) +--------PhysicalOlapScan[t1] -- !limit_join_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------filter((t1.id > 100)) -------------PhysicalOlapScan[t1] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecHash] -------------filter((t2.id > 100)) ---------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((t1.id > 100)) +----------PhysicalOlapScan[t1] 
+--------filter((t2.id > 100)) +----------PhysicalOlapScan[t2] -- !limit_subquery -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------PhysicalOlapScan[t1] -- !limit_subquery_filter -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------filter((t1.id > 100)) -----------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------filter((t1.id > 100)) +--------PhysicalOlapScan[t1] -- !limit_subquery_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_subquery_window -- PhysicalResultSink @@ -615,305 +503,229 @@ PhysicalResultSink ----PhysicalLimit[LOCAL] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalPartitionTopN -----------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalPartitionTopN +--------------PhysicalOlapScan[t1] -- !limit_nested_subquery -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------PhysicalOlapScan[t1] -- !limit_subquery_order_by -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------PhysicalOlapScan[t1] -- !limit_subquery_order_by_offset -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------PhysicalOlapScan[t1] -- !limit_subquery_distinct -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[LOCAL] -----------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------hashAgg[LOCAL] +--------PhysicalOlapScan[t1] -- !limit_cross_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalLimit[LOCAL] -------------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecReplicated] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------NestedLoopJoin[CROSS_JOIN] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t1] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t2] -- !limit_multiple_left_outer_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -----------PhysicalLimit[LOCAL] -------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalLimit[LOCAL] -----------------PhysicalOlapScan[t1] 
---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t3] +----PhysicalLimit[LOCAL] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------PhysicalLimit[LOCAL] +----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------------PhysicalLimit[LOCAL] +--------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] +--------PhysicalOlapScan[t3] -- !limit_multiple_right_outer_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -----------PhysicalDistribute[DistributionSpecHash] -------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t1] apply RFs: RF1 ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t3] +----PhysicalLimit[LOCAL] +------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t3] -- !limit_multiple_full_outer_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t3] +----PhysicalLimit[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] +--------PhysicalOlapScan[t3] -- !limit_multiple_cross_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalLimit[LOCAL] -------------NestedLoopJoin[CROSS_JOIN] ---------------PhysicalLimit[LOCAL] -----------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecReplicated] -----------------PhysicalLimit[LOCAL] -------------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecReplicated] +----PhysicalLimit[LOCAL] +------NestedLoopJoin[CROSS_JOIN] +--------PhysicalLimit[LOCAL] +----------NestedLoopJoin[CROSS_JOIN] ------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t3] +--------------PhysicalOlapScan[t1] +------------PhysicalLimit[LOCAL] +--------------PhysicalOlapScan[t2] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t3] -- !limit_left_outer_join_right_outer_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] 
-----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalDistribute[DistributionSpecHash] -------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalOlapScan[t1] apply RFs: RF0 ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t3] +----PhysicalLimit[LOCAL] +------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t3] -- !limit_left_outer_join_full_outer_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t3] +----PhysicalLimit[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] +--------PhysicalOlapScan[t3] -- !limit_left_outer_join_cross_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalLimit[LOCAL] -------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalLimit[LOCAL] -----------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecReplicated] +----PhysicalLimit[LOCAL] +------NestedLoopJoin[CROSS_JOIN] +--------PhysicalLimit[LOCAL] +----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t3] +--------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t3] -- !limit_right_outer_join_full_outer_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t3] +----PhysicalLimit[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[RIGHT_OUTER_JOIN] 
hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] +--------PhysicalOlapScan[t3] -- !limit_right_outer_join_cross_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalLimit[LOCAL] -------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t1] apply RFs: RF0 ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalLimit[LOCAL] -------------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecReplicated] +----PhysicalLimit[LOCAL] +------NestedLoopJoin[CROSS_JOIN] +--------PhysicalLimit[LOCAL] +----------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------------PhysicalOlapScan[t1] ------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t3] +--------------PhysicalOlapScan[t2] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t3] -- !limit_full_outer_join_cross_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalLimit[LOCAL] -------------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecReplicated] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t3] +----PhysicalLimit[LOCAL] +------NestedLoopJoin[CROSS_JOIN] +--------PhysicalLimit[LOCAL] +----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t3] -- !limit_left_outer_join_right_outer_join_full_outer_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t4.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------------PhysicalOlapScan[t1] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalOlapScan[t2] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t3] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t4] +----PhysicalLimit[LOCAL] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t4.id)) otherCondition=() +--------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] +----------PhysicalOlapScan[t3] +--------PhysicalOlapScan[t4] -- !limit_left_outer_join_right_outer_join_cross_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] 
-------PhysicalLimit[LOCAL] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalLimit[LOCAL] -------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF0 id->[id] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------------PhysicalOlapScan[t1] apply RFs: RF0 -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalOlapScan[t2] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalLimit[LOCAL] -------------------PhysicalOlapScan[t3] -----------PhysicalDistribute[DistributionSpecReplicated] +----PhysicalLimit[LOCAL] +------NestedLoopJoin[CROSS_JOIN] +--------PhysicalLimit[LOCAL] +----------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------------PhysicalOlapScan[t1] +--------------PhysicalOlapScan[t2] ------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t4] +--------------PhysicalOlapScan[t3] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t4] -- !limit_left_outer_join_full_outer_join_cross_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalLimit[LOCAL] -------------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------------PhysicalOlapScan[t1] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalOlapScan[t2] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t3] -----------PhysicalDistribute[DistributionSpecReplicated] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t4] +----PhysicalLimit[LOCAL] +------NestedLoopJoin[CROSS_JOIN] +--------PhysicalLimit[LOCAL] +----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------------PhysicalOlapScan[t1] +--------------PhysicalOlapScan[t2] +------------PhysicalOlapScan[t3] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t4] -- !limit_right_outer_join_full_outer_join_cross_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalLimit[LOCAL] -------------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalOlapScan[t1] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalOlapScan[t2] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t3] -----------PhysicalDistribute[DistributionSpecReplicated] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t4] +----PhysicalLimit[LOCAL] +------NestedLoopJoin[CROSS_JOIN] +--------PhysicalLimit[LOCAL] +----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = 
t3.id)) otherCondition=() +------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------------PhysicalOlapScan[t1] +--------------PhysicalOlapScan[t2] +------------PhysicalOlapScan[t3] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t4] -- !limit_left_outer_join_right_outer_join_full_outer_join_cross_join -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------NestedLoopJoin[CROSS_JOIN] -----------PhysicalLimit[LOCAL] -------------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t4.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------------PhysicalOlapScan[t1] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalOlapScan[t2] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalOlapScan[t3] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t4] -----------PhysicalDistribute[DistributionSpecReplicated] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t4] +----PhysicalLimit[LOCAL] +------NestedLoopJoin[CROSS_JOIN] +--------PhysicalLimit[LOCAL] +----------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t4.id)) otherCondition=() +------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------------PhysicalOlapScan[t1] +----------------PhysicalOlapScan[t2] +--------------PhysicalOlapScan[t3] +------------PhysicalOlapScan[t4] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[t4] diff --git a/regression-test/data/nereids_rules_p0/limit_push_down/order_push_down.out b/regression-test/data/nereids_rules_p0/limit_push_down/order_push_down.out index f22c364e9893203..b5cecee674e5ad0 100644 --- a/regression-test/data/nereids_rules_p0/limit_push_down/order_push_down.out +++ b/regression-test/data/nereids_rules_p0/limit_push_down/order_push_down.out @@ -2,99 +2,76 @@ -- !limit_offset_sort_project -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalOlapScan[t1] -- !limit_sort_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_sort_semi_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[LEFT_SEMI_JOIN] 
hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !right_semi_join_order -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !left_anti_join_order -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[LEFT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !right_anti_join_order -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[RIGHT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[RIGHT_ANTI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !full_outer_join_order -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !left_outer_join_order -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !right_outer_join_order -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t1] apply RFs: RF0 
-----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[t2] -- !cross_join_order -- PhysicalResultSink @@ -102,97 +79,78 @@ PhysicalResultSink ----PhysicalTopN[LOCAL_SORT] ------NestedLoopJoin[CROSS_JOIN] --------PhysicalTopN[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalTopN[LOCAL_SORT] ---------------PhysicalOlapScan[t1] ---------PhysicalDistribute[DistributionSpecReplicated] -----------PhysicalOlapScan[t2] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_offset_sort_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_sort_agg_having -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[LOCAL] +--------PhysicalOlapScan[t1] -- !limit_offset_agg_having -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[LOCAL] +--------PhysicalOlapScan[t1] -- !limit_offset_sort_agg_having -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[LOCAL] +--------PhysicalOlapScan[t1] -- !limit_distinct -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecReplicated] -------------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------NestedLoopJoin[CROSS_JOIN] +------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] -- !limit_distinct -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecReplicated] -------------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------NestedLoopJoin[CROSS_JOIN] 
+------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] -- !limit_distinct -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] -- !limit_distinct -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] -- !limit_window -- PhysicalResultSink @@ -200,10 +158,9 @@ PhysicalResultSink ----PhysicalLimit[LOCAL] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalPartitionTopN -----------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalPartitionTopN +--------------PhysicalOlapScan[t1] -- !limit_sort_window -- PhysicalResultSink @@ -211,9 +168,8 @@ PhysicalResultSink ----PhysicalTopN[LOCAL_SORT] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalOlapScan[t1] -- !limit_offset_window -- PhysicalResultSink @@ -221,10 +177,9 @@ PhysicalResultSink ----PhysicalLimit[LOCAL] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalPartitionTopN -----------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalPartitionTopN +--------------PhysicalOlapScan[t1] -- !limit_offset_sort_window -- PhysicalResultSink @@ -232,97 +187,76 @@ PhysicalResultSink ----PhysicalTopN[LOCAL_SORT] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalOlapScan[t1] -- !limit_sort_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((t1.id = 1)) -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] -- !limit_offset_sort_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] 
-----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((t1.id = 1)) -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] -- !limit_subquery_order_by_inside_limit_outside -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------PhysicalOlapScan[t1] -- !limit_subquery_all_inside -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalOlapScan[t1] -- !limit_set_operation -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] -- !limit_outside_order_inside_set_operation -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------PhysicalOlapScan[t2] +----PhysicalLimit[LOCAL] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------PhysicalOlapScan[t1] +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------PhysicalOlapScan[t2] -- !limit_inside_set_operation -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalUnion -------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------PhysicalOlapScan[t1] -------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------PhysicalTopN[MERGE_SORT] -----------------PhysicalDistribute[DistributionSpecGather] -------------------PhysicalTopN[LOCAL_SORT] ---------------------PhysicalOlapScan[t2] +--hashAgg[GLOBAL] +----hashAgg[LOCAL] +------PhysicalUnion +--------PhysicalOlapScan[t1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[t2] -- !limit_offset_set_operation -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalIntersect -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------PhysicalIntersect 
+--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_window -- PhysicalResultSink @@ -330,9 +264,8 @@ PhysicalResultSink ----PhysicalTopN[LOCAL_SORT] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalOlapScan[t1] -- !limit_offset_window -- PhysicalResultSink @@ -340,70 +273,60 @@ PhysicalResultSink ----PhysicalTopN[LOCAL_SORT] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalOlapScan[t1] -- !limit_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((t1.id = 1)) -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] -- !limit_offset_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((t1.id = 1)) -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] -- !limit_project_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((t1.id > 100)) -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------filter((t1.id > 100)) +--------PhysicalOlapScan[t1] -- !limit_join_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------filter((t1.id > 100)) -------------PhysicalOlapScan[t1] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecHash] -------------filter((t2.id > 100)) ---------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((t1.id > 100)) +----------PhysicalOlapScan[t1] +--------filter((t2.id > 100)) +----------PhysicalOlapScan[t2] -- !limit_subquery -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalOlapScan[t1] -- !limit_subquery_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((t1.id > 100)) -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------filter((t1.id > 100)) +--------PhysicalOlapScan[t1] -- !limit_subquery_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_subquery_window -- PhysicalResultSink @@ -411,182 +334,138 @@ 
PhysicalResultSink ----PhysicalTopN[LOCAL_SORT] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalOlapScan[t1] -- !limit_nested_subquery -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalOlapScan[t1] -- !limit_union_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------filter((t1.id > 100)) ---------------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------filter((t2.id > 100)) ---------------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------filter((t1.id > 100)) +--------------PhysicalOlapScan[t1] +------------filter((t2.id > 100)) +--------------PhysicalOlapScan[t2] -- !limit_union_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalOlapScan[t1] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalOlapScan[t2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t3.id = t4.id)) otherCondition=() ---------------------PhysicalOlapScan[t3] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalOlapScan[t4] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------------PhysicalOlapScan[t1] +--------------PhysicalOlapScan[t2] +------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t3.id = t4.id)) otherCondition=() +--------------PhysicalOlapScan[t3] +--------------PhysicalOlapScan[t4] -- !limit_union_window -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalWindow ---------------------PhysicalQuickSort[MERGE_SORT] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalWindow ---------------------PhysicalQuickSort[MERGE_SORT] -----------------------PhysicalDistribute[DistributionSpecGather] 
-------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalWindow +--------------PhysicalQuickSort[MERGE_SORT] +----------------PhysicalQuickSort[LOCAL_SORT] +------------------PhysicalOlapScan[t1] +------------PhysicalWindow +--------------PhysicalQuickSort[MERGE_SORT] +----------------PhysicalQuickSort[LOCAL_SORT] +------------------PhysicalOlapScan[t2] -- !limit_subquery_join_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------filter((t1.id > 100)) -------------PhysicalOlapScan[t1] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecHash] -------------filter((t2.id > 100)) ---------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((t1.id > 100)) +----------PhysicalOlapScan[t1] +--------filter((t2.id > 100)) +----------PhysicalOlapScan[t2] -- !limit_subqueryjoin_window -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalWindow -----------PhysicalQuickSort[LOCAL_SORT] -------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------PhysicalWindow +--------PhysicalQuickSort[LOCAL_SORT] +----------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] -- !limit_subquery_union_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------filter((t1.id > 100)) ---------------------PhysicalOlapScan[t1] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------filter((t2.id > 100)) ---------------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------filter((t1.id > 100)) +--------------PhysicalOlapScan[t1] +------------filter((t2.id > 100)) +--------------PhysicalOlapScan[t2] -- !limit_subquery_union_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------------PhysicalOlapScan[t1] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalOlapScan[t2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------hashJoin[INNER_JOIN] hashCondition=((t3.id = t4.id)) otherCondition=() build RFs:RF1 id->[id] 
---------------------PhysicalOlapScan[t3] apply RFs: RF1 ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalOlapScan[t4] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------------PhysicalOlapScan[t1] +--------------PhysicalOlapScan[t2] +------------hashJoin[INNER_JOIN] hashCondition=((t3.id = t4.id)) otherCondition=() +--------------PhysicalOlapScan[t3] +--------------PhysicalOlapScan[t4] -- !limit_subquery_union_window -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalWindow -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalOlapScan[t1] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------PhysicalWindow +--------PhysicalQuickSort[LOCAL_SORT] +----------hashAgg[GLOBAL] +------------hashAgg[LOCAL] +--------------PhysicalUnion +----------------PhysicalOlapScan[t1] +----------------PhysicalOlapScan[t2] -- !limit_correlated_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t2.id = t1.id)) otherCondition=() -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t2.id = t1.id)) otherCondition=() +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !limit_correlated_subquery_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t3.id = t1.id)) otherCondition=() -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------PhysicalOlapScan[t1] ---------PhysicalDistribute[DistributionSpecHash] -----------PhysicalOlapScan[t2] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t3] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t3.id = t1.id)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------PhysicalOlapScan[t1] +------PhysicalOlapScan[t2] +----PhysicalOlapScan[t3] -- !limit_correlated_subquery_window -- PhysicalResultSink --PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalWindow -----------PhysicalQuickSort[LOCAL_SORT] -------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((t2.id = t1.id)) otherCondition=() ---------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] +----PhysicalQuickSort[LOCAL_SORT] +------PhysicalWindow +--------PhysicalQuickSort[LOCAL_SORT] +----------hashJoin[LEFT_SEMI_JOIN] hashCondition=((t2.id = t1.id)) otherCondition=() +------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] -- !limit_cte_query -- PhysicalCteAnchor ( cteId=CTEId#0 ) @@ -595,10 +474,8 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) ------PhysicalOlapScan[t1] --PhysicalResultSink ----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] 
---------PhysicalTopN[LOCAL_SORT] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalCteConsumer ( cteId=CTEId#0 ) +------PhysicalTopN[LOCAL_SORT] +--------PhysicalCteConsumer ( cteId=CTEId#0 ) -- !limit_cte_outside_query -- PhysicalCteAnchor ( cteId=CTEId#0 ) @@ -607,22 +484,18 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) ------PhysicalOlapScan[t1] --PhysicalResultSink ----PhysicalLimit[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalLimit[LOCAL] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalCteConsumer ( cteId=CTEId#0 ) +------PhysicalLimit[LOCAL] +--------PhysicalCteConsumer ( cteId=CTEId#0 ) -- !limit_cte_outside_query -- PhysicalCteAnchor ( cteId=CTEId#0 ) --PhysicalCteProducer ( cteId=CTEId#0 ) ----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------filter((t1.id < 10)) -------------PhysicalOlapScan[t1] +------PhysicalTopN[LOCAL_SORT] +--------filter((t1.id < 10)) +----------PhysicalOlapScan[t1] --PhysicalResultSink -----PhysicalDistribute[DistributionSpecGather] -------PhysicalCteConsumer ( cteId=CTEId#0 ) +----PhysicalCteConsumer ( cteId=CTEId#0 ) -- !limit_cte_query_join -- PhysicalCteAnchor ( cteId=CTEId#0 ) @@ -635,13 +508,10 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) --------PhysicalOlapScan[t2] ----PhysicalResultSink ------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------hashJoin[FULL_OUTER_JOIN] hashCondition=((cte1.id = cte2.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalCteConsumer ( cteId=CTEId#1 ) +--------PhysicalTopN[LOCAL_SORT] +----------hashJoin[FULL_OUTER_JOIN] hashCondition=((cte1.id = cte2.id)) otherCondition=() +------------PhysicalCteConsumer ( cteId=CTEId#0 ) +------------PhysicalCteConsumer ( cteId=CTEId#1 ) -- !limit_cte_query_window -- PhysicalCteAnchor ( cteId=CTEId#0 ) @@ -653,39 +523,33 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) ------PhysicalTopN[LOCAL_SORT] --------PhysicalWindow ----------PhysicalQuickSort[MERGE_SORT] -------------PhysicalDistribute[DistributionSpecGather] ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) +------------PhysicalQuickSort[LOCAL_SORT] +--------------PhysicalCteConsumer ( cteId=CTEId#0 ) -- !limit_project_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((t1.id > 100)) -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------filter((t1.id > 100)) +--------PhysicalOlapScan[t1] -- !limit_join_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------filter((t1.id > 100)) -------------PhysicalOlapScan[t1] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecHash] -------------filter((t2.id > 100)) ---------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((t1.id > 100)) +----------PhysicalOlapScan[t1] +--------filter((t2.id > 100)) +----------PhysicalOlapScan[t2] -- !limit_subquery_join 
-- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t1] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t2] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_subquery_window -- PhysicalResultSink @@ -693,24 +557,21 @@ PhysicalResultSink ----PhysicalTopN[LOCAL_SORT] ------PhysicalWindow --------PhysicalQuickSort[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalOlapScan[t1] +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalOlapScan[t1] -- !limit_nested_subquery -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[t1] +----PhysicalLimit[LOCAL] +------PhysicalOlapScan[t1] -- !limit_subquery_distinct -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalOlapScan[t1] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[LOCAL] +--------PhysicalOlapScan[t1] -- !limit_cross_join -- PhysicalResultSink @@ -718,62 +579,43 @@ PhysicalResultSink ----PhysicalTopN[LOCAL_SORT] ------NestedLoopJoin[CROSS_JOIN] --------PhysicalTopN[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalTopN[LOCAL_SORT] ---------------PhysicalOlapScan[t1] ---------PhysicalDistribute[DistributionSpecReplicated] -----------PhysicalOlapScan[t2] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_multiple_left_outer_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------------PhysicalTopN[MERGE_SORT] ----------------PhysicalTopN[LOCAL_SORT] -------------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalTopN[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalTopN[LOCAL_SORT] -----------------------------PhysicalOlapScan[t1] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t3] +------------------PhysicalOlapScan[t1] +--------------PhysicalOlapScan[t2] +--------PhysicalOlapScan[t3] -- !limit_multiple_right_outer_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = 
t3.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t3] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] +--------PhysicalOlapScan[t3] -- !limit_multiple_full_outerjoin -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -----------PhysicalDistribute[DistributionSpecHash] -------------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t3] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] +--------PhysicalOlapScan[t3] -- !limit_subquery_cross_join -- PhysicalResultSink @@ -781,66 +623,52 @@ PhysicalResultSink ----PhysicalTopN[LOCAL_SORT] ------NestedLoopJoin[CROSS_JOIN] --------PhysicalTopN[MERGE_SORT] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalTopN[LOCAL_SORT] ---------------PhysicalOlapScan[t1] ---------PhysicalDistribute[DistributionSpecReplicated] -----------PhysicalOlapScan[t2] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[t1] +--------PhysicalOlapScan[t2] -- !limit_subquery_multiple_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[INNER_JOIN] hashCondition=((subq.id = t3.id)) otherCondition=() -----------hashJoin[INNER_JOIN] hashCondition=((subq.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[t1] -------------PhysicalDistribute[DistributionSpecHash] ---------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] -------------PhysicalOlapScan[t3] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[INNER_JOIN] hashCondition=((subq.id = t3.id)) otherCondition=() +--------hashJoin[INNER_JOIN] hashCondition=((subq.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] +--------PhysicalOlapScan[t3] -- !limit_subquery_multiple_join_nested_subquery -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t3.id)) otherCondition=() -----------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[t1] -------------PhysicalDistribute[DistributionSpecHash] ---------------PhysicalOlapScan[t2] -----------PhysicalDistribute[DistributionSpecHash] 
-------------PhysicalOlapScan[t3] +----PhysicalTopN[LOCAL_SORT] +------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t3.id)) otherCondition=() +--------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t1] +----------PhysicalOlapScan[t2] +--------PhysicalOlapScan[t3] -- !limit_subquery_multiple_join_nested_subquery_distinct -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t3.id)) otherCondition=() -------------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t2.id)) otherCondition=() ---------------PhysicalOlapScan[t1] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalOlapScan[t2] -------------PhysicalDistribute[DistributionSpecHash] ---------------PhysicalOlapScan[t3] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[LOCAL] +--------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t3.id)) otherCondition=() +----------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t2.id)) otherCondition=() +------------PhysicalOlapScan[t1] +------------PhysicalOlapScan[t2] +----------PhysicalOlapScan[t3] -- !limit_subquery_multiple_join_nested_subquery_distinct_filter -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------------filter((t1.id > 100)) -----------------PhysicalOlapScan[t1] apply RFs: RF0 RF1 ---------------PhysicalDistribute[DistributionSpecHash] -----------------filter((t2.id > 100)) -------------------PhysicalOlapScan[t2] -------------PhysicalDistribute[DistributionSpecHash] ---------------filter((t3.id > 100)) -----------------PhysicalOlapScan[t3] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[LOCAL] +--------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t3.id)) otherCondition=() +----------hashJoin[INNER_JOIN] hashCondition=((subq2.id = t2.id)) otherCondition=() +------------filter((t1.id > 100)) +--------------PhysicalOlapScan[t1] +------------filter((t2.id > 100)) +--------------PhysicalOlapScan[t2] +----------filter((t3.id > 100)) +------------PhysicalOlapScan[t3] diff --git a/regression-test/data/nereids_rules_p0/pkfk/eliminate_inner.out b/regression-test/data/nereids_rules_p0/pkfk/eliminate_inner.out index bc465978ccbbcd2..bb2b48ac37d917e 100644 --- a/regression-test/data/nereids_rules_p0/pkfk/eliminate_inner.out +++ b/regression-test/data/nereids_rules_p0/pkfk/eliminate_inner.out @@ -4,9 +4,11 @@ simple_case -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() -----PhysicalOlapScan[pkt] -----PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -15,9 +17,11 @@ with_pk_col -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() -----PhysicalOlapScan[pkt] -----PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] 
hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -26,11 +30,13 @@ with_pk_col -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() -----PhysicalOlapScan[pkt] -----hashJoin[INNER_JOIN] hashCondition=((fkt_not_null1.fk = fkt_not_null2.fk)) otherCondition=() -------PhysicalOlapScan[fkt_not_null] -------PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------hashJoin[INNER_JOIN] hashCondition=((fkt_not_null1.fk = fkt_not_null2.fk)) otherCondition=() +----------PhysicalOlapScan[fkt_not_null] +----------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -39,14 +45,16 @@ with_pk_col -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() build RFs:RF1 fk->[pk] -----filter((pkt.pk > 1)) -------PhysicalOlapScan[pkt] apply RFs: RF1 -----hashJoin[INNER_JOIN] hashCondition=((fkt_not_null1.fk = fkt_not_null2.fk)) otherCondition=() build RFs:RF0 fk->[fk] -------filter((fkt_not_null1.fk > 1)) ---------PhysicalOlapScan[fkt_not_null] apply RFs: RF0 -------filter((fkt_not_null2.fk > 1)) ---------PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------filter((pkt.pk > 1)) +--------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------hashJoin[INNER_JOIN] hashCondition=((fkt_not_null1.fk = fkt_not_null2.fk)) otherCondition=() +----------filter((fkt_not_null1.fk > 1)) +------------PhysicalOlapScan[fkt_not_null] +----------filter((fkt_not_null2.fk > 1)) +------------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -55,10 +63,12 @@ with_pk_col -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() -----PhysicalOlapScan[pkt] -----hashAgg[LOCAL] -------PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------hashAgg[LOCAL] +----------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -67,13 +77,18 @@ with_pk_col -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() -----PhysicalOlapScan[pkt] -----hashAgg[GLOBAL] -------hashAgg[LOCAL] ---------PhysicalUnion -----------PhysicalOlapScan[fkt_not_null] -----------PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------hashAgg[GLOBAL] +----------PhysicalDistribute[DistributionSpecHash] +------------hashAgg[LOCAL] +--------------PhysicalUnion +----------------PhysicalDistribute[DistributionSpecExecutionAny] +------------------PhysicalOlapScan[fkt_not_null] +----------------PhysicalDistribute[DistributionSpecExecutionAny] +------------------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -82,11 +97,13 @@ fk with window -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = 
fkt_not_null.fk)) otherCondition=() -----PhysicalOlapScan[pkt] -----PhysicalWindow -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------PhysicalWindow +----------PhysicalQuickSort[LOCAL_SORT] +------------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -95,11 +112,14 @@ fk with limit -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() build RFs:RF0 fk->[pk] -----PhysicalOlapScan[pkt] apply RFs: RF0 -----PhysicalLimit[GLOBAL] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------PhysicalLimit[GLOBAL] +----------PhysicalDistribute[DistributionSpecGather] +------------PhysicalLimit[LOCAL] +--------------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -108,11 +128,13 @@ pk with filter that same as fk -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() build RFs:RF0 fk->[pk] -----filter((pkt.pk = 1)) -------PhysicalOlapScan[pkt] apply RFs: RF0 -----filter((fkt_not_null.fk = 1)) -------PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------filter((pkt.pk = 1)) +--------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------filter((fkt_not_null.fk = 1)) +----------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -121,11 +143,13 @@ pk with filter that included same as fk -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() build RFs:RF0 fk->[pk] -----filter((pkt.pk = 1)) -------PhysicalOlapScan[pkt] apply RFs: RF0 -----filter((cast(f as DOUBLE) = 1) and (fkt_not_null.fk = 1)) -------PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------filter((pkt.pk = 1)) +--------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------filter((cast(f as DOUBLE) = 1.0) and (fkt_not_null.fk = 1)) +----------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -134,11 +158,13 @@ pk with filter that not same as fk -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() build RFs:RF0 fk->[pk] -----filter((cast(p as DOUBLE) = 1) and (pkt.pk = 1)) -------PhysicalOlapScan[pkt] apply RFs: RF0 -----filter((cast(f as DOUBLE) = 1) and (fkt_not_null.fk = 1)) -------PhysicalOlapScan[fkt_not_null] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt_not_null.fk)) otherCondition=() +------filter((cast(p as DOUBLE) = 1.0) and (pkt.pk = 1)) +--------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------filter((cast(f as DOUBLE) = 1.0) and (fkt_not_null.fk = 1)) +----------PhysicalOlapScan[fkt_not_null] -- !res -- @@ -147,8 +173,9 @@ simple_case -- !shape -- PhysicalResultSink ---filter(( not fk IS NULL)) -----PhysicalOlapScan[fkt] +--PhysicalDistribute[DistributionSpecGather] 
+----filter(( not fk IS NULL)) +------PhysicalOlapScan[fkt] -- !res -- 1 John @@ -162,8 +189,9 @@ with_pk_col -- !shape -- PhysicalResultSink ---filter(( not fk IS NULL)) -----PhysicalOlapScan[fkt] +--PhysicalDistribute[DistributionSpecGather] +----filter(( not fk IS NULL)) +------PhysicalOlapScan[fkt] -- !res -- 1 John 1 @@ -177,10 +205,11 @@ with_pk_col -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pk = fkt2.fk)) otherCondition=() -----filter(( not fk IS NULL)) +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pk = fkt2.fk)) otherCondition=() +------filter(( not fk IS NULL)) +--------PhysicalOlapScan[fkt] ------PhysicalOlapScan[fkt] -----PhysicalOlapScan[fkt] -- !res -- 1 John 1 @@ -198,11 +227,12 @@ with_pk_col -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pk = fkt2.fk)) otherCondition=() build RFs:RF0 fk->[fk] -----filter(( not fk IS NULL) and (fkt1.fk > 1)) -------PhysicalOlapScan[fkt] apply RFs: RF0 -----filter((fkt2.fk > 1)) -------PhysicalOlapScan[fkt] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pk = fkt2.fk)) otherCondition=() +------filter(( not fk IS NULL) and (fkt1.fk > 1)) +--------PhysicalOlapScan[fkt] +------filter((fkt2.fk > 1)) +--------PhysicalOlapScan[fkt] -- !res -- 2 Alice 2 @@ -216,9 +246,10 @@ with_pk_col -- !shape -- PhysicalResultSink ---hashAgg[LOCAL] -----filter(( not fk IS NULL)) -------PhysicalOlapScan[fkt] +--PhysicalDistribute[DistributionSpecGather] +----hashAgg[LOCAL] +------filter(( not fk IS NULL)) +--------PhysicalOlapScan[fkt] -- !res -- 1 1 @@ -230,13 +261,18 @@ with_pk_col -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt.fk)) otherCondition=() -----PhysicalOlapScan[pkt] -----hashAgg[GLOBAL] -------hashAgg[LOCAL] ---------PhysicalUnion -----------PhysicalOlapScan[fkt] -----------PhysicalOlapScan[fkt] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt.fk)) otherCondition=() +------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------hashAgg[GLOBAL] +----------PhysicalDistribute[DistributionSpecHash] +------------hashAgg[LOCAL] +--------------PhysicalUnion +----------------PhysicalDistribute[DistributionSpecExecutionAny] +------------------PhysicalOlapScan[fkt] +----------------PhysicalDistribute[DistributionSpecExecutionAny] +------------------PhysicalOlapScan[fkt] -- !res -- 1 John 1 @@ -248,10 +284,11 @@ fk with window -- !shape -- PhysicalResultSink ---PhysicalWindow -----PhysicalQuickSort[LOCAL_SORT] -------filter(( not fk IS NULL)) ---------PhysicalOlapScan[fkt] +--PhysicalDistribute[DistributionSpecGather] +----PhysicalWindow +------PhysicalQuickSort[LOCAL_SORT] +--------filter(( not fk IS NULL)) +----------PhysicalOlapScan[fkt] -- !res -- 1 1 1 @@ -267,18 +304,21 @@ fk with limit PhysicalResultSink --filter(( not fk IS NULL)) ----PhysicalLimit[GLOBAL] -------PhysicalLimit[LOCAL] ---------PhysicalOlapScan[fkt] +------PhysicalDistribute[DistributionSpecGather] +--------PhysicalLimit[LOCAL] +----------PhysicalOlapScan[fkt] -- !res -- +1 1 -- !name -- pk with filter that same as fk -- !shape -- PhysicalResultSink ---filter(( not fk IS NULL) and (fkt.fk = 1)) -----PhysicalOlapScan[fkt] +--PhysicalDistribute[DistributionSpecGather] +----filter(( not fk IS NULL) and (fkt.fk = 1)) +------PhysicalOlapScan[fkt] -- !res -- 1 John 1 @@ -289,8 +329,9 @@ pk with filter that included same as fk -- !shape -- 
PhysicalResultSink ---filter(( not fk IS NULL) and (cast(f as DOUBLE) = 1) and (fkt.fk = 1)) -----PhysicalOlapScan[fkt] +--PhysicalDistribute[DistributionSpecGather] +----filter(( not fk IS NULL) and (cast(f as DOUBLE) = 1.0) and (fkt.fk = 1)) +------PhysicalOlapScan[fkt] -- !res -- @@ -299,11 +340,13 @@ pk with filter that not same as fk -- !shape -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt.fk)) otherCondition=() build RFs:RF0 fk->[pk] -----filter((cast(p as DOUBLE) = 1) and (pkt.pk = 1)) -------PhysicalOlapScan[pkt] apply RFs: RF0 -----filter((cast(f as DOUBLE) = 1) and (fkt.fk = 1)) -------PhysicalOlapScan[fkt] +--PhysicalDistribute[DistributionSpecGather] +----hashJoin[INNER_JOIN] hashCondition=((pkt.pk = fkt.fk)) otherCondition=() +------filter((cast(p as DOUBLE) = 1.0) and (pkt.pk = 1)) +--------PhysicalOlapScan[pkt] +------PhysicalDistribute[DistributionSpecHash] +--------filter((cast(f as DOUBLE) = 1.0) and (fkt.fk = 1)) +----------PhysicalOlapScan[fkt] -- !res -- diff --git a/regression-test/data/nereids_rules_p0/predicate_infer/infer_predicate.out b/regression-test/data/nereids_rules_p0/predicate_infer/infer_predicate.out index daee36163a25a26..15144b566b04749 100644 --- a/regression-test/data/nereids_rules_p0/predicate_infer/infer_predicate.out +++ b/regression-test/data/nereids_rules_p0/predicate_infer/infer_predicate.out @@ -1,474 +1,397 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !infer_predicate_basic_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------filter((t1.score > 10)) ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.score > 10)) ------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_join_with_filter -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.score > 10)) ---------PhysicalOlapScan[t] apply RFs: RF0 -------filter((t2.name = 'Alice')) ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.score > 10)) +------PhysicalOlapScan[t] +----filter((t2.name = 'Alice')) +------PhysicalOlapScan[t] -- !infer_predicate_left_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------filter((t1.score > 20)) ---------PhysicalOlapScan[t] +--hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.score > 20)) ------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_right_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------PhysicalOlapScan[t] apply RFs: RF0 -------filter((t2.score > 20)) ---------PhysicalOlapScan[t] +--hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----PhysicalOlapScan[t] +----filter((t2.score > 20)) +------PhysicalOlapScan[t] -- !infer_predicate_full_outer_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter(((t1.name = 'Test') OR (t2.name = 'Test'))) -------hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() 
---------PhysicalOlapScan[t] ---------PhysicalOlapScan[t] +--filter(((t1.name = 'Test') OR (t2.name = 'Test'))) +----hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------PhysicalOlapScan[t] +------PhysicalOlapScan[t] -- !infer_predicate_left_semi_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------filter((t1.score > 20)) ---------PhysicalOlapScan[t] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.score > 20)) ------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_left_anti_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------filter((t1.score > 20)) ---------PhysicalOlapScan[t] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.score > 20)) ------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_from_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id = 1)) ---------PhysicalOlapScan[t] apply RFs: RF0 -------filter((t2.id = 1)) ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t.id = t2.id)) otherCondition=() +----filter((t1.id = 1)) +------PhysicalOlapScan[t] +----filter((t2.id = 1)) +------PhysicalOlapScan[t] -- !infer_predicate_multi_level_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------PhysicalOlapScan[t] apply RFs: RF0 ---------PhysicalOlapScan[t] apply RFs: RF1 -------filter((t3.name = 'Test')) ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t2.id = t3.id)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------PhysicalOlapScan[t] +------PhysicalOlapScan[t] +----filter((t3.name = 'Test')) +------PhysicalOlapScan[t] -- !infer_predicate_join_with_project_limit -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ----PhysicalLimit[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalLimit[LOCAL] -----------PhysicalOlapScan[t] apply RFs: RF0 -----PhysicalDistribute[DistributionSpecReplicated] -------filter((t2.score > 20)) +------PhysicalLimit[LOCAL] --------PhysicalOlapScan[t] +----filter((t2.score > 20)) +------PhysicalOlapScan[t] -- !infer_predicate_with_union -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t.id = t3.id)) otherCondition=() -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalUnion ---------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------filter((t1.id = 1)) -------------------PhysicalOlapScan[t] ---------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------PhysicalOlapScan[t] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t.id = t3.id)) otherCondition=() 
+----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------PhysicalUnion +----------filter((t1.id = 1)) +------------PhysicalOlapScan[t] +----------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_with_except -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t.id = t3.id)) otherCondition=() -------PhysicalExcept ---------PhysicalDistribute[DistributionSpecHash] -----------PhysicalOlapScan[t] ---------PhysicalDistribute[DistributionSpecHash] -----------PhysicalOlapScan[t] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t.id = t3.id)) otherCondition=() +----PhysicalExcept +------PhysicalOlapScan[t] +------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_with_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t.id)) otherCondition=() build RFs:RF0 id->[id] -------PhysicalOlapScan[t] apply RFs: RF0 -------filter((t.score > 60)) ---------PhysicalOlapScan[t] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t.id)) otherCondition=() +----PhysicalOlapScan[t] +----filter((t.score > 60)) +------PhysicalOlapScan[t] -- !infer_predicate_complex_condition -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.score > t2.score)) -------filter((t1.name = 'Test')) ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.score > t2.score)) +----filter((t1.name = 'Test')) ------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_with_window_function -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalWindow -------PhysicalQuickSort[LOCAL_SORT] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -----------PhysicalOlapScan[t] apply RFs: RF0 -----------filter((t2.name = 'Charlie')) -------------PhysicalOlapScan[t] +--PhysicalWindow +----PhysicalQuickSort[LOCAL_SORT] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t] +--------filter((t2.name = 'Charlie')) +----------PhysicalOlapScan[t] -- !infer_predicate_with_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------filter((t1.id > 70)) -----------PhysicalOlapScan[t] apply RFs: RF0 ---------filter((t2.id > 70)) -----------PhysicalOlapScan[t] +--hashAgg[LOCAL] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id > 70)) +--------PhysicalOlapScan[t] +------filter((t2.id > 70)) +--------PhysicalOlapScan[t] -- !infer_predicate_complex_and_or_logic -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=(((t1.score > 80) OR ((t2.name = 'Dave') AND (t1.id < 50)))) -------filter(((t1.score > 80) OR (t1.id < 50))) ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=(((t1.score > 80) OR ((t2.name = 'Dave') AND (t1.id < 50)))) +----filter(((t1.score > 80) OR (t1.id < 50))) ------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_multiple_join_filter -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] 
-----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() build RFs:RF0 id->[id];RF1 name->[name] -------filter((t1.score > 90)) ---------PhysicalOlapScan[t] apply RFs: RF0 RF1 -------filter((t2.score < 60)) ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=() +----filter((t1.score > 90)) +------PhysicalOlapScan[t] +----filter((t2.score < 60)) +------PhysicalOlapScan[t] -- !infer_predicate_join_with_not_exists -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_ANTI_JOIN] hashCondition=((t2.id = t1.id)) otherCondition=() +--hashJoin[LEFT_ANTI_JOIN] hashCondition=((t2.id = t1.id)) otherCondition=() +----PhysicalOlapScan[t] +----filter((t2.score > 100)) ------PhysicalOlapScan[t] -------filter((t2.score > 100)) ---------PhysicalOlapScan[t] -- !infer_predicate_complex_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------PhysicalOlapScan[t] apply RFs: RF0 -------filter((t2.name = 'Frank') and (t2.score > 110)) ---------PhysicalOlapScan[t] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----PhysicalOlapScan[t] +----filter((t2.name = 'Frank') and (t2.score > 110)) +------PhysicalOlapScan[t] -- !infer_predicate_join_with_function_processed -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((expr_length(name) = expr_length(name))) otherCondition=() -------filter((t1.score > 120)) ---------PhysicalOlapScan[t] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((expr_length(name) = expr_length(name))) otherCondition=() +----filter((t1.score > 120)) +------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_nested_subqueries -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((t.score > 130) and (t1.id < 70) and (t2.name = 'George')) -------PhysicalOlapScan[t] +--filter((t.score > 130) and (t1.id < 70) and (t2.name = 'George')) +----PhysicalOlapScan[t] -- !infer_predicate_join_with_aggregate_having -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((sum(score) > 140)) -------hashAgg[LOCAL] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------PhysicalOlapScan[t] -----------PhysicalOlapScan[t] +--filter((sum(score) > 140)) +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------PhysicalOlapScan[t] +--------PhysicalOlapScan[t] -- !infer_predicate_mixed_join_types -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF0 id->[id] -------hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------PhysicalOlapScan[t] apply RFs: RF0 ---------PhysicalOlapScan[t] -------filter((t3.score > 150)) ---------PhysicalOlapScan[t] +--hashJoin[RIGHT_OUTER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------PhysicalOlapScan[t] +------PhysicalOlapScan[t] +----filter((t3.score > 150)) +------PhysicalOlapScan[t] -- !infer_predicate_join_with_distinct -- PhysicalResultSink 
---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------filter((t1.score > 160)) -----------PhysicalOlapScan[t] +--hashAgg[LOCAL] +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.score > 160)) --------PhysicalOlapScan[t] +------PhysicalOlapScan[t] -- !infer_predicate_join_with_case_when -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------filter((if((score > 170), 'high', 'low') = 'high')) ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((if((score > 170), 'high', 'low') = 'high')) ------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_self_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------filter((t1.score > 10)) ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.score > 10)) ------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_complex_multitable_join -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------filter((t1.score > 20)) -----------PhysicalOlapScan[t] apply RFs: RF1 ---------PhysicalOlapScan[t] -------filter((t3.name = 'Helen')) +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.score > 20)) --------PhysicalOlapScan[t] +------PhysicalOlapScan[t] +----filter((t3.name = 'Helen')) +------PhysicalOlapScan[t] -- !infer_predicate_aggregate_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((t_agg.total > 30)) -------hashAgg[LOCAL] ---------PhysicalOlapScan[t] +--filter((t_agg.total > 30)) +----hashAgg[LOCAL] +------PhysicalOlapScan[t] -- !infer_predicate_join_with_function -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----NestedLoopJoin[INNER_JOIN](abs((score - score)) < 40) -------PhysicalOlapScan[t] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t] +--NestedLoopJoin[INNER_JOIN](abs((score - score)) < 40) +----PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_subquery_filter -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t.id)) otherCondition=() build RFs:RF0 id->[id] -------PhysicalOlapScan[t] apply RFs: RF0 -------filter((t.score > 50)) ---------PhysicalOlapScan[t] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t.id)) otherCondition=() +----PhysicalOlapScan[t] +----filter((t.score > 50)) +------PhysicalOlapScan[t] -- !infer_predicate_with_not_operator -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((t1.score <= 60)) -------PhysicalOlapScan[t] +--filter((t1.score <= 60)) +----PhysicalOlapScan[t] -- !infer_predicate_complex_nested_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t.score = t.score)) otherCondition=() build RFs:RF0 score->[score] -------filter((t.score > 80) and (t1.id 
> 10)) ---------PhysicalOlapScan[t] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((t.score > 80)) -----------PhysicalOlapScan[t] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t.score = t.score)) otherCondition=() +----filter((t.score > 80) and (t1.id > 10)) +------PhysicalOlapScan[t] +----filter((t.score > 80)) +------PhysicalOlapScan[t] -- !infer_predicate_multi_join_subquery_aggregate -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[LEFT_SEMI_JOIN] hashCondition=((t2.id = t.id)) otherCondition=() build RFs:RF2 id->[id] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------------PhysicalOlapScan[t] apply RFs: RF0 -------------PhysicalOlapScan[t] apply RFs: RF2 +--hashAgg[LOCAL] +----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t2.id = t.id)) otherCondition=() +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ----------PhysicalOlapScan[t] ---------filter((t.score > 100)) ----------PhysicalOlapScan[t] +--------PhysicalOlapScan[t] +------filter((t.score > 100)) +--------PhysicalOlapScan[t] -- !infer_predicate_multi_join_complex_condition_not_exists -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_ANTI_JOIN] hashCondition=((t4.id = t3.id)) otherCondition=() -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -----------filter((t1.score > 110)) -------------PhysicalOlapScan[t] +--hashJoin[LEFT_ANTI_JOIN] hashCondition=((t4.id = t3.id)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------filter((t1.score > 110)) ----------PhysicalOlapScan[t] --------PhysicalOlapScan[t] ------PhysicalOlapScan[t] +----PhysicalOlapScan[t] -- !infer_predicate_multi_join_complex_subquery -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------PhysicalOlapScan[t] apply RFs: RF1 ---------PhysicalOlapScan[t] -------filter((t.score > 130)) ---------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------PhysicalOlapScan[t] +------PhysicalOlapScan[t] +----filter((t.score > 130)) +------PhysicalOlapScan[t] -- !infer_predicate_multi_join_with_having_clause -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----filter((sum(score) > 150)) -------hashAgg[LOCAL] ---------hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() -----------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() -------------PhysicalOlapScan[t] -------------PhysicalOlapScan[t] +--filter((sum(score) > 150)) +----hashAgg[LOCAL] +------hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +--------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----------PhysicalOlapScan[t] ----------PhysicalOlapScan[t] +--------PhysicalOlapScan[t] -- !infer0 -- 
PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id = 1)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id = 1)) -----------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id = 1)) +------PhysicalOlapScan[t1] +----filter((t2.id = 1)) +------PhysicalOlapScan[t2] -- !infer1 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] ---------filter((t1.id = 1)) -----------PhysicalOlapScan[t1] apply RFs: RF0 RF1 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t2.id = 1)) -------------PhysicalOlapScan[t2] -------PhysicalDistribute[DistributionSpecHash] ---------filter((t3.id = 1)) -----------PhysicalOlapScan[t] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id = 1)) +--------PhysicalOlapScan[t1] +------filter((t2.id = 1)) +--------PhysicalOlapScan[t2] +----filter((t3.id = 1)) +------PhysicalOlapScan[t] -- !infer2 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id = 1)) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id = 1)) -----------PhysicalOlapScan[t2] +--hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id = 1)) +----PhysicalOlapScan[t1] +----filter((t2.id = 1)) +------PhysicalOlapScan[t2] -- !infer3 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id = 1)) -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------PhysicalOlapScan[t2] +--hashJoin[FULL_OUTER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=((t1.id = 1)) +----PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !infer4 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((t1.id = 1)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id = 1)) -----------PhysicalOlapScan[t2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((t1.id = 1)) +------PhysicalOlapScan[t1] +----filter((t2.id = 1)) +------PhysicalOlapScan[t2] -- !infer5 -- PhysicalResultSink ---hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() build RFs:RF1 id->[id] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t3.id)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ------filter((t1.id = 1)) --------PhysicalLimit[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------PhysicalLimit[LOCAL] ---------------PhysicalOlapScan[t1] apply RFs: RF0 RF1 -------PhysicalDistribute[DistributionSpecReplicated] 
---------filter((t2.id = 1)) -----------PhysicalOlapScan[t2] -----PhysicalDistribute[DistributionSpecReplicated] -------filter((t3.id = 1)) ---------PhysicalOlapScan[t] +----------PhysicalLimit[LOCAL] +------------PhysicalOlapScan[t1] +------filter((t2.id = 1)) +--------PhysicalOlapScan[t2] +----filter((t3.id = 1)) +------PhysicalOlapScan[t] -- !infer6 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=((t1.id = 1)) -------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecHash] ---------filter((t2.id = 1) and (t2.name = 'bob')) -----------PhysicalOlapScan[t2] +--hashJoin[LEFT_OUTER_JOIN] hashCondition=((t1.id = t2.id) and (t1.name = t2.name)) otherCondition=((t1.id = 1)) +----PhysicalOlapScan[t1] +----filter((t2.id = 1) and (t2.name = 'bob')) +------PhysicalOlapScan[t2] -- !infer7 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t12.id = t34.id)) otherCondition=() build RFs:RF2 id->[id] -------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF1 id->[id] ---------filter((t1.id < 9) and (t1.id > 1)) -----------PhysicalOlapScan[t1] apply RFs: RF1 RF2 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((t2.id < 9) and (t2.id > 1)) -------------PhysicalOlapScan[t2] -------PhysicalDistribute[DistributionSpecHash] ---------hashJoin[INNER_JOIN] hashCondition=((t3.id = t4.id)) otherCondition=() build RFs:RF0 id->[id] -----------filter(( not (id = 3)) and (t34.id < 9) and (t34.id > 1)) -------------PhysicalOlapScan[t3] apply RFs: RF0 -----------PhysicalDistribute[DistributionSpecHash] -------------filter(( not (id = 4)) and (t4.id < 9) and (t4.id > 1)) ---------------PhysicalOlapScan[t4] +--hashJoin[INNER_JOIN] hashCondition=((t12.id = t34.id)) otherCondition=() +----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +------filter((t1.id < 9) and (t1.id > 1)) +--------PhysicalOlapScan[t1] +------filter((t2.id < 9) and (t2.id > 1)) +--------PhysicalOlapScan[t2] +----hashJoin[INNER_JOIN] hashCondition=((t3.id = t4.id)) otherCondition=() +------filter(( not (id = 3)) and (t34.id < 9) and (t34.id > 1)) +--------PhysicalOlapScan[t3] +------filter(( not (id = 4)) and (t4.id < 9) and (t4.id > 1)) +--------PhysicalOlapScan[t4] -- !infer8 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----NestedLoopJoin[INNER_JOIN]( not (id = id)) -------filter((t1.id = 1)) ---------PhysicalOlapScan[t1] -------PhysicalDistribute[DistributionSpecReplicated] ---------PhysicalOlapScan[t2] +--NestedLoopJoin[INNER_JOIN]( not (id = id)) +----filter((t1.id = 1)) +------PhysicalOlapScan[t1] +----PhysicalOlapScan[t2] -- !infer9 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() build RFs:RF0 id->[id] -------filter((cast(id as BIGINT) = 2147483648)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecHash] ---------filter((cast(id as BIGINT) = 2147483648)) -----------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +----filter((cast(id as BIGINT) = 2147483648)) +------PhysicalOlapScan[t1] +----filter((cast(id as BIGINT) = 2147483648)) +------PhysicalOlapScan[t2] -- !infer10 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] 
hashCondition=((expr_cast(id as SMALLINT) = expr_cast(id as SMALLINT))) otherCondition=() build RFs:RF0 expr_cast(id as SMALLINT)->[id] -------filter((cast(id as BIGINT) = 2147483648)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((cast(id as BIGINT) = 2147483648)) -----------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((expr_cast(id as SMALLINT) = expr_cast(id as SMALLINT))) otherCondition=() +----filter((cast(id as BIGINT) = 2147483648)) +------PhysicalOlapScan[t1] +----filter((cast(id as BIGINT) = 2147483648)) +------PhysicalOlapScan[t2] -- !infer11 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[INNER_JOIN] hashCondition=((expr_cast(id as LARGEINT) = expr_cast(id as LARGEINT))) otherCondition=() build RFs:RF0 expr_cast(id as LARGEINT)->[id] -------filter((cast(id as BIGINT) = 2147483648)) ---------PhysicalOlapScan[t1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((cast(id as BIGINT) = 2147483648)) -----------PhysicalOlapScan[t2] +--hashJoin[INNER_JOIN] hashCondition=((expr_cast(id as LARGEINT) = expr_cast(id as LARGEINT))) otherCondition=() +----filter((cast(id as BIGINT) = 2147483648)) +------PhysicalOlapScan[t1] +----filter((cast(id as BIGINT) = 2147483648)) +------PhysicalOlapScan[t2] diff --git a/regression-test/data/nereids_rules_p0/push_down_limit_distinct/push_down_limit_distinct.out b/regression-test/data/nereids_rules_p0/push_down_limit_distinct/push_down_limit_distinct.out index e58bfeeb94d91bc..9ffe95203875019 100644 --- a/regression-test/data/nereids_rules_p0/push_down_limit_distinct/push_down_limit_distinct.out +++ b/regression-test/data/nereids_rules_p0/push_down_limit_distinct/push_down_limit_distinct.out @@ -2,18 +2,14 @@ -- !basic -- PhysicalResultSink --PhysicalLimit[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalLimit[LOCAL] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------PhysicalOlapScan[t] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[LOCAL] ---------------------hashAgg[LOCAL] -----------------------PhysicalOlapScan[t] +----PhysicalLimit[LOCAL] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------PhysicalOlapScan[t] +------------PhysicalLimit[LOCAL] +--------------hashAgg[LOCAL] +----------------PhysicalOlapScan[t] diff --git a/regression-test/data/nereids_rules_p0/push_down_top_n/push_down_top_n_distinct_through_union.out b/regression-test/data/nereids_rules_p0/push_down_top_n/push_down_top_n_distinct_through_union.out index d15bd9ed52bfae5..58dfe43639ab14c 100644 --- a/regression-test/data/nereids_rules_p0/push_down_top_n/push_down_top_n_distinct_through_union.out +++ b/regression-test/data/nereids_rules_p0/push_down_top_n/push_down_top_n_distinct_through_union.out @@ -2,148 +2,106 @@ -- !push_down_topn_through_union -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion 
-----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalOlapScan[table2] +------------PhysicalOlapScan[table2] -- !push_down_topn_union_with_conditions -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------filter((t1.score > 10)) ---------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------filter((t2.name = 'Test')) ---------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------filter((t3.id < 5)) ---------------------PhysicalOlapScan[table2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------filter((t1.score > 10)) +--------------PhysicalOlapScan[table2] +------------filter((t2.name = 'Test')) +--------------PhysicalOlapScan[table2] +------------filter((t3.id < 5)) +--------------PhysicalOlapScan[table2] -- !push_down_topn_union_with_order_by -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalOlapScan[table2] +------------PhysicalOlapScan[table2] +------------PhysicalOlapScan[table2] -- !push_down_topn_nested_union -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalOlapScan[table2] +------------PhysicalOlapScan[table2] +------------PhysicalOlapScan[table2] +------------PhysicalOlapScan[table2] -- !push_down_topn_union_after_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] 
-----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------------PhysicalOlapScan[table2] ---------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------------PhysicalOlapScan[table2] +--------------PhysicalOlapScan[table2] +------------PhysicalOlapScan[table2] -- !push_down_topn_union_different_projections -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalOlapScan[table2] +------------PhysicalOlapScan[table2] -- !push_down_topn_union_with_subquery -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------filter((table2.score > 20)) ---------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalOlapScan[table2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------filter((table2.score > 20)) +--------------PhysicalOlapScan[table2] +------------PhysicalOlapScan[table2] -- !push_down_topn_union_with_limit -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalLimit[LOCAL] -------------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalLimit[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalLimit[LOCAL] -------------------------PhysicalOlapScan[table2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------PhysicalLimit[GLOBAL] +--------------PhysicalLimit[LOCAL] +----------------PhysicalOlapScan[table2] +------------PhysicalLimit[GLOBAL] +--------------PhysicalLimit[LOCAL] +----------------PhysicalOlapScan[table2] -- !push_down_topn_union_complex_conditions -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] 
-----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------filter((t1.name = 'Test') and (t1.score > 10)) ---------------------PhysicalOlapScan[table2] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------filter((t2.id < 5) and (t2.score < 20)) ---------------------PhysicalOlapScan[table2] +----PhysicalTopN[LOCAL_SORT] +------hashAgg[GLOBAL] +--------hashAgg[LOCAL] +----------PhysicalUnion +------------filter((t1.name = 'Test') and (t1.score > 10)) +--------------PhysicalOlapScan[table2] +------------filter((t2.id < 5) and (t2.score < 20)) +--------------PhysicalOlapScan[table2] diff --git a/regression-test/data/nereids_rules_p0/push_down_top_n/push_down_top_n_through_union.out b/regression-test/data/nereids_rules_p0/push_down_top_n/push_down_top_n_through_union.out index 1135cd52708f50e..285156732dec3cd 100644 --- a/regression-test/data/nereids_rules_p0/push_down_top_n/push_down_top_n_through_union.out +++ b/regression-test/data/nereids_rules_p0/push_down_top_n/push_down_top_n_through_union.out @@ -2,185 +2,132 @@ -- !push_down_topn_through_union -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalUnion -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalUnion +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] -- !push_down_topn_union_with_conditions -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalUnion -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------filter((t1.score > 10)) ---------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------filter((t2.name = 'Test')) ---------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------filter((t3.id < 5)) ---------------------PhysicalOlapScan[table1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalUnion +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------filter((t1.score > 10)) +--------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] 
+----------PhysicalTopN[LOCAL_SORT] +------------filter((t2.name = 'Test')) +--------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------filter((t3.id < 5)) +--------------PhysicalOlapScan[table1] -- !push_down_topn_union_with_order_by -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalUnion -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalUnion +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] -- !push_down_topn_nested_union -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalUnion -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalUnion +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] -- !push_down_topn_union_after_join -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalUnion -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] 
-----------------PhysicalTopN[LOCAL_SORT] -------------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() ---------------------PhysicalOlapScan[table1] ---------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalUnion +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------hashJoin[INNER_JOIN] hashCondition=((t1.id = t2.id)) otherCondition=() +--------------PhysicalOlapScan[table1] +--------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] -- !push_down_topn_union_different_projections -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalUnion -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalUnion +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] -- !push_down_topn_union_with_subquery -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalUnion -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------filter((table1.score > 20)) ---------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------PhysicalOlapScan[table1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalUnion +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------filter((table1.score > 20)) +--------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalOlapScan[table1] -- !push_down_topn_union_with_limit -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalUnion -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalTopN[LOCAL_SORT] -----------------PhysicalLimit[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------PhysicalLimit[LOCAL] -----------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalTopN[LOCAL_SORT] -----------------PhysicalLimit[GLOBAL] 
-------------------PhysicalDistribute[DistributionSpecGather] ---------------------PhysicalLimit[LOCAL] -----------------------PhysicalOlapScan[table1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalUnion +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalLimit[GLOBAL] +--------------PhysicalLimit[LOCAL] +----------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------PhysicalLimit[GLOBAL] +--------------PhysicalLimit[LOCAL] +----------------PhysicalOlapScan[table1] -- !push_down_topn_union_complex_conditions -- PhysicalResultSink --PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalUnion -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------filter((t1.name = 'Test') and (t1.score > 10)) ---------------------PhysicalOlapScan[table1] -----------PhysicalDistribute[DistributionSpecExecutionAny] -------------PhysicalTopN[MERGE_SORT] ---------------PhysicalDistribute[DistributionSpecGather] -----------------PhysicalTopN[LOCAL_SORT] -------------------filter((t2.id < 5) and (t2.score < 20)) ---------------------PhysicalOlapScan[table1] +----PhysicalTopN[LOCAL_SORT] +------PhysicalUnion +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------filter((t1.name = 'Test') and (t1.score > 10)) +--------------PhysicalOlapScan[table1] +--------PhysicalTopN[MERGE_SORT] +----------PhysicalTopN[LOCAL_SORT] +------------filter((t2.id < 5) and (t2.score < 20)) +--------------PhysicalOlapScan[table1] diff --git a/regression-test/data/nereids_rules_p0/transposeJoin/transposeSemiJoinAgg.out b/regression-test/data/nereids_rules_p0/transposeJoin/transposeSemiJoinAgg.out index 1836054f25ae6fe..d71c15d216fa13a 100644 --- a/regression-test/data/nereids_rules_p0/transposeJoin/transposeSemiJoinAgg.out +++ b/regression-test/data/nereids_rules_p0/transposeJoin/transposeSemiJoinAgg.out @@ -1,101 +1,81 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this -- !groupby_positive_case -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.a = T2.a)) otherCondition=() build RFs:RF0 a->[a] ---------filter((T1.__DORIS_DELETE_SIGN__ = 0)) -----------PhysicalOlapScan[T1] apply RFs: RF0 ---------PhysicalDistribute[DistributionSpecHash] -----------filter((T2.__DORIS_DELETE_SIGN__ = 0)) -------------PhysicalOlapScan[T2] +--hashAgg[LOCAL] +----hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.a = T2.a)) otherCondition=() +------filter((T1.__DORIS_DELETE_SIGN__ = 0)) +--------PhysicalOlapScan[T1] +------filter((T2.__DORIS_DELETE_SIGN__ = 0)) +--------PhysicalOlapScan[T2] -- !groupby_negative_case -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.D = expr_cast(a as BIGINT))) otherCondition=() -------hashAgg[LOCAL] ---------filter((T1.__DORIS_DELETE_SIGN__ = 0)) -----------PhysicalOlapScan[T1] -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((T2.__DORIS_DELETE_SIGN__ = 0)) -----------PhysicalOlapScan[T2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.D = expr_cast(a as BIGINT))) otherCondition=() +----hashAgg[LOCAL] +------filter((T1.__DORIS_DELETE_SIGN__ = 0)) +--------PhysicalOlapScan[T1] +----filter((T2.__DORIS_DELETE_SIGN__ = 0)) +------PhysicalOlapScan[T2] -- !grouping_positive_case -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.a = T2.a)) otherCondition=() build RFs:RF0 a->[a] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalRepeat ---------------filter((T1.__DORIS_DELETE_SIGN__ = 0)) -----------------PhysicalOlapScan[T1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((T2.__DORIS_DELETE_SIGN__ = 0)) -----------PhysicalOlapScan[T2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.a = T2.a)) otherCondition=() +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------PhysicalRepeat +----------filter((T1.__DORIS_DELETE_SIGN__ = 0)) +------------PhysicalOlapScan[T1] +----filter((T2.__DORIS_DELETE_SIGN__ = 0)) +------PhysicalOlapScan[T2] -- !grouping_negative_case -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.D = expr_cast(a as BIGINT))) otherCondition=() -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalRepeat ---------------filter((T1.__DORIS_DELETE_SIGN__ = 0)) -----------------PhysicalOlapScan[T1] -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((T2.__DORIS_DELETE_SIGN__ = 0)) -----------PhysicalOlapScan[T2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.D = expr_cast(a as BIGINT))) otherCondition=() +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------PhysicalRepeat +----------filter((T1.__DORIS_DELETE_SIGN__ = 0)) +------------PhysicalOlapScan[T1] +----filter((T2.__DORIS_DELETE_SIGN__ = 0)) +------PhysicalOlapScan[T2] -- !groupby_positive_case2 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashAgg[LOCAL] -------hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.a = T2.a)) otherCondition=() build RFs:RF0 a->[a] ---------filter((T1.__DORIS_DELETE_SIGN__ = 0)) -----------PhysicalOlapScan[T1] apply RFs: RF0 ---------PhysicalDistribute[DistributionSpecHash] 
-----------filter((T2.__DORIS_DELETE_SIGN__ = 0)) -------------PhysicalOlapScan[T2] +--hashAgg[LOCAL] +----hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.a = T2.a)) otherCondition=() +------filter((T1.__DORIS_DELETE_SIGN__ = 0)) +--------PhysicalOlapScan[T1] +------filter((T2.__DORIS_DELETE_SIGN__ = 0)) +--------PhysicalOlapScan[T2] -- !groupby_negative_case2 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.D = expr_cast(a as BIGINT))) otherCondition=() -------hashAgg[LOCAL] ---------filter((T1.__DORIS_DELETE_SIGN__ = 0)) -----------PhysicalOlapScan[T1] -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((T2.__DORIS_DELETE_SIGN__ = 0)) -----------PhysicalOlapScan[T2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.D = expr_cast(a as BIGINT))) otherCondition=() +----hashAgg[LOCAL] +------filter((T1.__DORIS_DELETE_SIGN__ = 0)) +--------PhysicalOlapScan[T1] +----filter((T2.__DORIS_DELETE_SIGN__ = 0)) +------PhysicalOlapScan[T2] -- !grouping_positive_case2 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.a = T2.a)) otherCondition=() build RFs:RF0 a->[a] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalRepeat ---------------filter((T1.__DORIS_DELETE_SIGN__ = 0)) -----------------PhysicalOlapScan[T1] apply RFs: RF0 -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((T2.__DORIS_DELETE_SIGN__ = 0)) -----------PhysicalOlapScan[T2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.a = T2.a)) otherCondition=() +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------PhysicalRepeat +----------filter((T1.__DORIS_DELETE_SIGN__ = 0)) +------------PhysicalOlapScan[T1] +----filter((T2.__DORIS_DELETE_SIGN__ = 0)) +------PhysicalOlapScan[T2] -- !grouping_negative_case2 -- PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.D = expr_cast(a as BIGINT))) otherCondition=() -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalRepeat ---------------filter((T1.__DORIS_DELETE_SIGN__ = 0)) -----------------PhysicalOlapScan[T1] -------PhysicalDistribute[DistributionSpecReplicated] ---------filter((T2.__DORIS_DELETE_SIGN__ = 0)) -----------PhysicalOlapScan[T2] +--hashJoin[LEFT_SEMI_JOIN] hashCondition=((T3.D = expr_cast(a as BIGINT))) otherCondition=() +----hashAgg[GLOBAL] +------hashAgg[LOCAL] +--------PhysicalRepeat +----------filter((T1.__DORIS_DELETE_SIGN__ = 0)) +------------PhysicalOlapScan[T1] +----filter((T2.__DORIS_DELETE_SIGN__ = 0)) +------PhysicalOlapScan[T2] diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query17.out b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query17.out index fcccba6021cc7f8..38b1118083ba324 100644 --- a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query17.out +++ b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query17.out @@ -22,7 +22,7 @@ PhysicalResultSink --------------------PhysicalProject ----------------------PhysicalDistribute[DistributionSpecHash] ------------------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] ---------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 
i_item_sk->[ss_item_sk] +--------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk,sr_item_sk] ----------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------PhysicalProject --------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] @@ -35,7 +35,7 @@ PhysicalResultSink ------------------------------------------PhysicalOlapScan[date_dim] ----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 +--------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 ------------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------------PhysicalProject ----------------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query24.out b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query24.out index 069b4262eaa6748..009769ead1cae7b 100644 --- a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query24.out +++ b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query24.out @@ -7,7 +7,7 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) --------PhysicalDistribute[DistributionSpecHash] ----------hashAgg[LOCAL] ------------PhysicalProject ---------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF5 sr_item_sk->[ss_item_sk];RF6 sr_ticket_number->[ss_ticket_number] +--------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF5 sr_item_sk->[i_item_sk,ss_item_sk];RF6 sr_ticket_number->[ss_ticket_number] ----------------PhysicalProject ------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] --------------------PhysicalDistribute[DistributionSpecHash] @@ -31,7 +31,7 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) --------------------------------PhysicalOlapScan[customer_address] --------------------PhysicalDistribute[DistributionSpecHash] ----------------------PhysicalProject -------------------------PhysicalOlapScan[item] +------------------------PhysicalOlapScan[item] apply RFs: RF5 ----------------PhysicalDistribute[DistributionSpecHash] ------------------PhysicalProject --------------------PhysicalOlapScan[store_returns] diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query25.out b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query25.out index c81ff44fa836ec1..7e49210d700e092 100644 --- a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query25.out +++ 
b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query25.out @@ -21,7 +21,7 @@ PhysicalResultSink ------------------PhysicalProject --------------------PhysicalDistribute[DistributionSpecHash] ----------------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk] +------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk,sr_item_sk] --------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------PhysicalProject ------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] @@ -34,7 +34,7 @@ PhysicalResultSink ----------------------------------------PhysicalOlapScan[date_dim] --------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 +------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 ----------------------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------------------PhysicalProject --------------------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 1999)) diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query29.out b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query29.out index 512cddbf322d5a4..51e3aaec48acc8d 100644 --- a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query29.out +++ b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query29.out @@ -15,7 +15,7 @@ PhysicalResultSink ------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 ----------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] ---------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk] +--------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk,sr_item_sk] ----------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------PhysicalProject --------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] @@ -28,7 +28,7 @@ PhysicalResultSink 
------------------------------------------PhysicalOlapScan[date_dim] ----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 +--------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 ------------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------------PhysicalProject ----------------------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 1998)) diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query64.out b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query64.out index 9a6915fe83b718e..fed32dc285e8d84 100644 --- a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query64.out +++ b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query64.out @@ -47,7 +47,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----------------------------PhysicalOlapScan[store_returns] apply RFs: RF11 RF12 --------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF10 cs_item_sk->[ss_item_sk] +------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF10 cs_item_sk->[ss_item_sk,i_item_sk] --------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------PhysicalProject ------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF9 p_promo_sk->[ss_promo_sk] @@ -84,7 +84,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----------------------------------------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------------------------------------PhysicalProject --------------------------------------------------------filter((item.i_current_price <= 58.00) and (item.i_current_price >= 49.00) and i_color IN ('blush', 'lace', 'lawn', 'misty', 'orange', 'pink')) -----------------------------------------------------------PhysicalOlapScan[item] +----------------------------------------------------------PhysicalOlapScan[item] apply RFs: RF10 ------------------------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------------------------PhysicalProject ----------------------------------------------------PhysicalOlapScan[customer_address] diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query65.out b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query65.out index f989ea968c9baea..93e4e4b864778e1 100644 --- a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query65.out +++ b/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query65.out @@ -8,7 +8,7 @@ PhysicalResultSink ----------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ------------PhysicalDistribute[DistributionSpecHash] --------------PhysicalProject -----------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF3 
s_store_sk->[ss_store_sk] +----------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk,ss_store_sk] ------------------hashJoin[INNER_JOIN] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF2 ss_store_sk->[ss_store_sk] --------------------hashAgg[GLOBAL] ----------------------PhysicalDistribute[DistributionSpecHash] @@ -32,7 +32,7 @@ PhysicalResultSink ------------------------------------PhysicalProject --------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 +------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 ----------------------------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------------------------PhysicalProject --------------------------------------------filter((date_dim.d_month_seq <= 1187) and (date_dim.d_month_seq >= 1176)) diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.out index f128eebed4dda5f..bdd8ac1f16afae6 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.out @@ -18,12 +18,12 @@ PhysicalResultSink ------------------------------PhysicalProject --------------------------------hashJoin[INNER_JOIN] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk];RF4 cs_item_sk->[sr_item_sk] +------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[ss_customer_sk,sr_customer_sk];RF4 cs_item_sk->[ss_item_sk,sr_item_sk] --------------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------------PhysicalProject ------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] --------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF5 +----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 --------------------------------------------PhysicalProject ----------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF6 
--------------------------------------PhysicalDistribute[DistributionSpecHash] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.out index 812e7dfedd354a1..3b9cc675c0cb85c 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.out @@ -17,12 +17,12 @@ PhysicalResultSink ----------------------------PhysicalProject ------------------------------hashJoin[INNER_JOIN] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] --------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk];RF4 cs_item_sk->[sr_item_sk] +----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[ss_customer_sk,sr_customer_sk];RF4 cs_item_sk->[ss_item_sk,sr_item_sk] ------------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------------PhysicalProject ----------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] ------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF5 +--------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 ------------------------------------------PhysicalProject --------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF6 ------------------------------------PhysicalDistribute[DistributionSpecHash] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.out index 27eccad7610eada..63f23267f04aa0b 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.out @@ -16,12 +16,12 @@ PhysicalResultSink --------------------------PhysicalProject ----------------------------hashJoin[INNER_JOIN] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk];RF4 cs_item_sk->[sr_item_sk] +--------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = 
catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[ss_customer_sk,sr_customer_sk];RF4 cs_item_sk->[ss_item_sk,sr_item_sk] ----------------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------------PhysicalProject --------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] ----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF5 +------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 ----------------------------------------PhysicalProject ------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF7 ----------------------------------PhysicalDistribute[DistributionSpecHash] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.out index 3859969de559f5f..c59907df29ca362 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.out @@ -6,7 +6,7 @@ PhysicalResultSink ------PhysicalDistribute[DistributionSpecGather] --------PhysicalTopN[LOCAL_SORT] ----------PhysicalProject -------------hashJoin[INNER_JOIN] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id] +------------hashJoin[INNER_JOIN] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id,i_item_id] --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) 
as DOUBLE))) build RFs:RF12 item_id->[i_item_id] ------------------PhysicalProject @@ -47,12 +47,12 @@ PhysicalResultSink --------------------------PhysicalProject ----------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] ------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() +--------------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 +------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 ----------------------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] +--------------------------------------PhysicalOlapScan[item] apply RFs: RF13 ------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------PhysicalProject ----------------------------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.out index bb2fd53c1de17aa..2703f5aba8b5ea6 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.out @@ -5,7 +5,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----PhysicalProject ------hashAgg[LOCAL] --------PhysicalProject -----------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF19 cs_item_sk->[ss_item_sk] +----------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF19 cs_item_sk->[ss_item_sk,i_item_sk,sr_item_sk] ------------PhysicalProject --------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() ----------------PhysicalProject @@ -23,7 +23,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----------------------------------------PhysicalProject ------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() --------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() +----------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF9 sr_item_sk->[ss_item_sk,i_item_sk];RF10 sr_ticket_number->[ss_ticket_number] ------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ss_item_sk] 
--------------------------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------------------------PhysicalProject @@ -39,7 +39,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ------------------------------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() --------------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF8 RF17 RF19 +------------------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF8 RF9 RF10 RF17 RF19 --------------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------------------------------------------------PhysicalProject ------------------------------------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() @@ -64,10 +64,10 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) --------------------------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------------------------PhysicalProject ------------------------------------------------------filter((item.i_current_price <= 33.00) and (item.i_current_price >= 24.00) and i_color IN ('blanched', 'brown', 'burlywood', 'chocolate', 'drab', 'medium')) ---------------------------------------------------------PhysicalOlapScan[item] +--------------------------------------------------------PhysicalOlapScan[item] apply RFs: RF9 RF19 ------------------------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_returns] +----------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF19 --------------------------------------------PhysicalDistribute[DistributionSpecReplicated] ----------------------------------------------PhysicalProject ------------------------------------------------PhysicalOlapScan[store] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.out index 016dd4798a98103..0efb98d242f58ca 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.out @@ -5,9 +5,9 @@ PhysicalResultSink ----PhysicalDistribute[DistributionSpecGather] ------PhysicalTopN[LOCAL_SORT] --------PhysicalProject -----------hashJoin[INNER_JOIN] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF4 ss_store_sk->[ss_store_sk] +----------hashJoin[INNER_JOIN] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF4 ss_store_sk->[s_store_sk,ss_store_sk] ------------PhysicalProject ---------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() +--------------hashJoin[INNER_JOIN] 
hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] ----------------PhysicalDistribute[DistributionSpecHash] ------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() --------------------hashAgg[GLOBAL] @@ -16,7 +16,7 @@ PhysicalResultSink --------------------------PhysicalProject ----------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF4 +--------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF3 RF4 ------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------PhysicalProject ----------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) @@ -26,7 +26,7 @@ PhysicalResultSink ------------------------PhysicalOlapScan[item] ----------------PhysicalDistribute[DistributionSpecHash] ------------------PhysicalProject ---------------------PhysicalOlapScan[store] +--------------------PhysicalOlapScan[store] apply RFs: RF4 ------------hashAgg[GLOBAL] --------------PhysicalDistribute[DistributionSpecHash] ----------------hashAgg[LOCAL] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.out index 9e9ac913a32476c..7471697b9f613fb 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.out @@ -6,7 +6,7 @@ PhysicalResultSink ------PhysicalDistribute[DistributionSpecGather] --------PhysicalTopN[LOCAL_SORT] ----------PhysicalProject -------------hashJoin[INNER_JOIN] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id] +------------hashJoin[INNER_JOIN] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id,i_item_id] --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF12 item_id->[i_item_id] ------------------PhysicalProject @@ -44,12 +44,12 @@ PhysicalResultSink --------------------------PhysicalProject ----------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cr_returned_date_sk] ------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() +--------------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cr_item_sk] ----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF7 +------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF6 RF7 ----------------------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] +--------------------------------------PhysicalOlapScan[item] apply RFs: RF13 
------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------PhysicalProject ----------------------------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.out index 4c4fbb34f6e667c..ef8fa368e6d63eb 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.out @@ -26,7 +26,7 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) ----------------------hashJoin[INNER_JOIN] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF5 web_site_sk->[ws_web_site_sk] ------------------------hashJoin[INNER_JOIN] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF4 ca_address_sk->[ws_ship_addr_sk] --------------------------hashJoin[INNER_JOIN] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_ship_date_sk] -----------------------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF2 ws_order_number->[wr_order_number] +----------------------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF2 ws_order_number->[ws_order_number,wr_order_number] ------------------------------PhysicalProject --------------------------------hashJoin[INNER_JOIN] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF1 wr_order_number->[ws_order_number] ----------------------------------PhysicalDistribute[DistributionSpecHash] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.out index 77b39740066f27d..d604dd0a7be84ba 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.out @@ -10,7 +10,7 @@ PhysicalResultSink --------------hashAgg[LOCAL] ----------------PhysicalProject ------------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF9 s_store_sk->[ss_store_sk] ---------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ss_item_sk] +--------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ss_item_sk,sr_item_sk] ----------------------PhysicalProject ------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] --------------------------PhysicalProject @@ -18,14 +18,14 @@ PhysicalResultSink ------------------------------PhysicalProject --------------------------------hashJoin[INNER_JOIN] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN] 
hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk];RF4 cs_item_sk->[sr_item_sk] +------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[ss_customer_sk,sr_customer_sk];RF4 cs_item_sk->[ss_item_sk,sr_item_sk] --------------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------------PhysicalProject ------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] --------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF5 RF8 RF9 +----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 RF8 RF9 --------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF6 +----------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF6 RF8 --------------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------------PhysicalProject ------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.out index 7912023ea737f6b..5f2ab88b0b8f368 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.out @@ -9,7 +9,7 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) ------------PhysicalProject --------------hashJoin[INNER_JOIN] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_zip->[ca_zip];RF6 s_store_sk->[ss_store_sk] ----------------PhysicalProject -------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] +------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk,sr_item_sk] --------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF3 ca_address_sk->[c_current_addr_sk] ----------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk] ------------------------PhysicalDistribute[DistributionSpecHash] @@ -18,7 +18,7 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) ------------------------------PhysicalProject --------------------------------PhysicalOlapScan[store_sales] apply 
RFs: RF0 RF1 RF2 RF4 RF6 ------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] +--------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 ------------------------PhysicalDistribute[DistributionSpecHash] --------------------------PhysicalProject ----------------------------PhysicalOlapScan[customer] apply RFs: RF3 diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.out index 8a2f034a723e001..ecfc4cf3f19adee 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.out @@ -9,7 +9,7 @@ PhysicalResultSink ------------hashAgg[LOCAL] --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF9 s_store_sk->[ss_store_sk] -------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ss_item_sk] +------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ss_item_sk,sr_item_sk] --------------------PhysicalProject ----------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] ------------------------PhysicalProject @@ -17,14 +17,14 @@ PhysicalResultSink ----------------------------PhysicalProject ------------------------------hashJoin[INNER_JOIN] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] --------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk];RF4 cs_item_sk->[sr_item_sk] +----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[ss_customer_sk,sr_customer_sk];RF4 cs_item_sk->[ss_item_sk,sr_item_sk] ------------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------------PhysicalProject ----------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] ------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF5 RF8 RF9 +--------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 RF8 RF9 ------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF6 +--------------------------------------------PhysicalOlapScan[store_returns] 
apply RFs: RF3 RF4 RF6 RF8 ------------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------------PhysicalProject ----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.out index 194fb43a5b7691e..f792e648e87598c 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.out @@ -9,21 +9,21 @@ PhysicalResultSink ------------hashAgg[LOCAL] --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF9 s_store_sk->[ss_store_sk] -------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ss_item_sk] +------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ss_item_sk,sr_item_sk] --------------------PhysicalProject ----------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] ------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[cs_sold_date_sk] --------------------------PhysicalProject ----------------------------hashJoin[INNER_JOIN] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk];RF4 cs_item_sk->[sr_item_sk] +--------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[ss_customer_sk,sr_customer_sk];RF4 cs_item_sk->[ss_item_sk,sr_item_sk] ----------------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------------PhysicalProject --------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] ----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF5 RF8 RF9 +------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 RF8 RF9 ----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF7 +------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF7 RF8 
----------------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------------PhysicalProject --------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.out index f6e88adcc4de407..a449bf2addb1b87 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.out @@ -6,7 +6,7 @@ PhysicalResultSink ------PhysicalDistribute[DistributionSpecGather] --------PhysicalTopN[LOCAL_SORT] ----------PhysicalProject -------------hashJoin[INNER_JOIN] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id] +------------hashJoin[INNER_JOIN] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id,i_item_id] --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE))) build RFs:RF12 item_id->[i_item_id] ------------------PhysicalProject @@ -52,7 +52,7 @@ PhysicalResultSink ------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 ----------------------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] +--------------------------------------PhysicalOlapScan[item] apply RFs: RF13 ------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------PhysicalProject ----------------------------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.out index 
62538dde904b7ee..2caec5e3f93e19a 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.out @@ -5,7 +5,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----PhysicalProject ------hashAgg[LOCAL] --------PhysicalProject -----------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF19 cs_item_sk->[ss_item_sk] +----------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF19 cs_item_sk->[ss_item_sk,i_item_sk,sr_item_sk] ------------PhysicalProject --------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF18 p_promo_sk->[ss_promo_sk] ----------------PhysicalProject @@ -23,7 +23,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----------------------------------------PhysicalProject ------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] --------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF9 sr_item_sk->[ss_item_sk];RF10 sr_ticket_number->[ss_ticket_number] +----------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF9 sr_item_sk->[ss_item_sk,i_item_sk];RF10 sr_ticket_number->[ss_ticket_number] ------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ss_item_sk] --------------------------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------------------------PhysicalProject @@ -64,10 +64,10 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) --------------------------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------------------------PhysicalProject ------------------------------------------------------filter((item.i_current_price <= 33.00) and (item.i_current_price >= 24.00) and i_color IN ('blanched', 'brown', 'burlywood', 'chocolate', 'drab', 'medium')) ---------------------------------------------------------PhysicalOlapScan[item] +--------------------------------------------------------PhysicalOlapScan[item] apply RFs: RF9 RF19 ------------------------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_returns] +----------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF19 --------------------------------------------PhysicalDistribute[DistributionSpecReplicated] ----------------------------------------------PhysicalProject ------------------------------------------------PhysicalOlapScan[store] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.out index b2ccb8ef1073266..76324a3ac4cf610 
100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.out @@ -5,7 +5,7 @@ PhysicalResultSink ----PhysicalDistribute[DistributionSpecGather] ------PhysicalTopN[LOCAL_SORT] --------PhysicalProject -----------hashJoin[INNER_JOIN] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF4 ss_store_sk->[ss_store_sk] +----------hashJoin[INNER_JOIN] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF4 ss_store_sk->[s_store_sk,ss_store_sk] ------------PhysicalProject --------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] ----------------PhysicalDistribute[DistributionSpecHash] @@ -26,7 +26,7 @@ PhysicalResultSink ------------------------PhysicalOlapScan[item] ----------------PhysicalDistribute[DistributionSpecHash] ------------------PhysicalProject ---------------------PhysicalOlapScan[store] +--------------------PhysicalOlapScan[store] apply RFs: RF4 ------------hashAgg[GLOBAL] --------------PhysicalDistribute[DistributionSpecHash] ----------------hashAgg[LOCAL] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.out index 982d6d30c895982..c57ed5119f6a614 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.out @@ -17,13 +17,13 @@ PhysicalResultSink ----------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_sold_date_sk = d1.d_date_sk) and (d1.d_week_seq = d2.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq];RF5 d_date_sk->[cs_sold_date_sk] ------------------------------hashJoin[INNER_JOIN] hashCondition=((inventory.inv_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[inv_date_sk] --------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] +----------------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk,inv_item_sk] ------------------------------------PhysicalProject --------------------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF1 cd_demo_sk->[cs_bill_cdemo_sk] ----------------------------------------PhysicalProject ------------------------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF0 cs_item_sk->[inv_item_sk] --------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF3 RF8 +----------------------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF2 RF3 RF8 --------------------------------------------PhysicalDistribute[DistributionSpecHash] 
----------------------------------------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() ------------------------------------------------hashJoin[LEFT_OUTER_JOIN] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.out index 500b145c00c228d..67079c26354d844 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.out @@ -6,7 +6,7 @@ PhysicalResultSink ------PhysicalDistribute[DistributionSpecGather] --------PhysicalTopN[LOCAL_SORT] ----------PhysicalProject -------------hashJoin[INNER_JOIN] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id] +------------hashJoin[INNER_JOIN] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id,i_item_id] --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF12 item_id->[i_item_id] ------------------PhysicalProject @@ -49,7 +49,7 @@ PhysicalResultSink ------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF6 RF7 ----------------------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] +--------------------------------------PhysicalOlapScan[item] apply RFs: RF13 ------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------PhysicalProject ----------------------------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.out index 1ce11a6c98f67fe..c977812d0a172fd 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.out @@ -26,7 +26,7 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) ----------------------hashJoin[INNER_JOIN] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF5 web_site_sk->[ws_web_site_sk] ------------------------hashJoin[INNER_JOIN] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF4 ca_address_sk->[ws_ship_addr_sk] --------------------------hashJoin[INNER_JOIN] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_ship_date_sk] -----------------------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF2 ws_order_number->[wr_order_number] +----------------------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF2 ws_order_number->[ws_order_number,wr_order_number] ------------------------------PhysicalProject --------------------------------hashJoin[INNER_JOIN] 
hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF1 wr_order_number->[ws_order_number] ----------------------------------PhysicalDistribute[DistributionSpecHash] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query64.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query64.out index f7fa3929c99038c..5443c04d5c4b81c 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query64.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query64.out @@ -11,7 +11,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----------------PhysicalProject ------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() --------------------PhysicalProject -----------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF17 ss_customer_sk->[c_customer_sk] +----------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF18 ss_customer_sk->[c_customer_sk] ------------------------PhysicalDistribute[DistributionSpecHash] --------------------------PhysicalProject ----------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() @@ -22,7 +22,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) --------------------------------------PhysicalProject ----------------------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() ------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer] apply RFs: RF17 +--------------------------------------------PhysicalOlapScan[customer] apply RFs: RF18 ------------------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------------------PhysicalProject ----------------------------------------------hashJoin[INNER_JOIN] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() @@ -39,24 +39,24 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----------------------------------PhysicalOlapScan[customer_address] ------------------------PhysicalDistribute[DistributionSpecHash] --------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF11 ss_item_sk->[sr_item_sk];RF12 ss_ticket_number->[sr_ticket_number] +----------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF12 ss_item_sk->[sr_item_sk];RF13 ss_ticket_number->[sr_ticket_number] ------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF11 RF12 +--------------------------------PhysicalOlapScan[store_returns] apply RFs: RF12 RF13 ------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------PhysicalProject ----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) 
otherCondition=() ------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF9 ss_cdemo_sk->[cd_demo_sk] +--------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF10 ss_cdemo_sk->[cd_demo_sk] ----------------------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF9 +--------------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF10 ----------------------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF8 ss_addr_sk->[ca_address_sk] +--------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF9 ss_addr_sk->[ca_address_sk] ----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF8 +------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF9 ----------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[ss_item_sk] +------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[ss_item_sk];RF8 i_item_sk->[cs_item_sk] --------------------------------------------------PhysicalProject ----------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() ------------------------------------------------------PhysicalProject @@ -78,7 +78,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----------------------------------------------------------------------------------PhysicalProject ------------------------------------------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF0 cr_order_number->[cs_order_number];RF1 cr_item_sk->[cs_item_sk] --------------------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 +----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF8 --------------------------------------------------------------------------------------PhysicalProject ----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_returns] ------------------------------------------------------------------PhysicalDistribute[DistributionSpecReplicated] diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query17.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query17.out index fcccba6021cc7f8..38b1118083ba324 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query17.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query17.out @@ -22,7 +22,7 @@ PhysicalResultSink --------------------PhysicalProject ----------------------PhysicalDistribute[DistributionSpecHash] ------------------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] ---------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk] +--------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk,sr_item_sk] ----------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------PhysicalProject --------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] @@ -35,7 +35,7 @@ PhysicalResultSink ------------------------------------------PhysicalOlapScan[date_dim] ----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 +--------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 ------------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------------PhysicalProject ----------------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query24.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query24.out index ff1ea2cb66b145a..479424affd183fc 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query24.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query24.out @@ -7,7 +7,7 @@ PhysicalCteAnchor ( cteId=CTEId#0 ) --------PhysicalDistribute[DistributionSpecHash] ----------hashAgg[LOCAL] ------------PhysicalProject ---------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF5 sr_item_sk->[ss_item_sk];RF6 sr_ticket_number->[ss_ticket_number] +--------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF5 sr_item_sk->[i_item_sk,ss_item_sk];RF6 sr_ticket_number->[ss_ticket_number] ----------------PhysicalProject ------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] --------------------PhysicalDistribute[DistributionSpecHash] @@ -31,7 +31,7 @@ 
PhysicalCteAnchor ( cteId=CTEId#0 ) --------------------------------PhysicalOlapScan[customer_address] --------------------PhysicalDistribute[DistributionSpecHash] ----------------------PhysicalProject -------------------------PhysicalOlapScan[item] +------------------------PhysicalOlapScan[item] apply RFs: RF5 ----------------PhysicalDistribute[DistributionSpecHash] ------------------PhysicalProject --------------------PhysicalOlapScan[store_returns] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query25.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query25.out index f03e994d147616f..62c752c60ce3ed1 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query25.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query25.out @@ -21,7 +21,7 @@ PhysicalResultSink ------------------PhysicalProject --------------------PhysicalDistribute[DistributionSpecHash] ----------------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk] +------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk,sr_item_sk] --------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------PhysicalProject ------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] @@ -34,7 +34,7 @@ PhysicalResultSink ----------------------------------------PhysicalOlapScan[date_dim] --------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 +------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 ----------------------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------------------PhysicalProject --------------------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 2000)) diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query29.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query29.out index c7a67607b947af8..85996806881158d 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query29.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query29.out @@ -15,7 +15,7 @@ PhysicalResultSink ------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 ----------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] ---------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk] 
+--------------------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ss_item_sk,sr_item_sk] ----------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------PhysicalProject --------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] @@ -28,7 +28,7 @@ PhysicalResultSink ------------------------------------------PhysicalOlapScan[date_dim] ----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 +--------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 ------------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------------PhysicalProject ----------------------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 1999)) diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query64.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query64.out index e43185203b60864..55860e7b0b62392 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query64.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query64.out @@ -7,27 +7,27 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) --------PhysicalDistribute[DistributionSpecHash] ----------hashAgg[LOCAL] ------------PhysicalProject ---------------hashJoin[INNER_JOIN] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF19 d_date_sk->[c_first_shipto_date_sk] +--------------hashJoin[INNER_JOIN] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF20 d_date_sk->[c_first_shipto_date_sk] ----------------PhysicalProject -------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF18 d_date_sk->[c_first_sales_date_sk] +------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF19 d_date_sk->[c_first_sales_date_sk] --------------------PhysicalProject -----------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF17 ss_customer_sk->[c_customer_sk] +----------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF18 ss_customer_sk->[c_customer_sk] ------------------------PhysicalDistribute[DistributionSpecHash] --------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() build RFs:RF16 ca_address_sk->[c_current_addr_sk] +----------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) 
otherCondition=() build RFs:RF17 ca_address_sk->[c_current_addr_sk] ------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF15 cd_demo_sk->[c_current_cdemo_sk] +----------------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF16 cd_demo_sk->[c_current_cdemo_sk] ------------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() build RFs:RF14 hd_demo_sk->[c_current_hdemo_sk] +----------------------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() build RFs:RF15 hd_demo_sk->[c_current_hdemo_sk] ------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer] apply RFs: RF14 RF15 RF16 RF17 RF18 RF19 +--------------------------------------------PhysicalOlapScan[customer] apply RFs: RF15 RF16 RF17 RF18 RF19 RF20 ------------------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() build RFs:RF13 ib_income_band_sk->[hd_income_band_sk] +----------------------------------------------hashJoin[INNER_JOIN] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() build RFs:RF14 ib_income_band_sk->[hd_income_band_sk] ------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF13 +--------------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF14 ------------------------------------------------PhysicalDistribute[DistributionSpecReplicated] --------------------------------------------------PhysicalProject ----------------------------------------------------PhysicalOlapScan[income_band] @@ -39,24 +39,24 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----------------------------------PhysicalOlapScan[customer_address] ------------------------PhysicalDistribute[DistributionSpecHash] --------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF11 ss_item_sk->[sr_item_sk];RF12 ss_ticket_number->[sr_ticket_number] +----------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF12 ss_item_sk->[sr_item_sk];RF13 ss_ticket_number->[sr_ticket_number] ------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF11 RF12 +--------------------------------PhysicalOlapScan[store_returns] apply RFs: RF12 RF13 ------------------------------PhysicalDistribute[DistributionSpecHash] 
--------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF10 p_promo_sk->[ss_promo_sk] +----------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF11 p_promo_sk->[ss_promo_sk] ------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF9 ss_cdemo_sk->[cd_demo_sk] +--------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF10 ss_cdemo_sk->[cd_demo_sk] ----------------------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF9 +--------------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF10 ----------------------------------------PhysicalDistribute[DistributionSpecHash] ------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF8 ss_addr_sk->[ca_address_sk] +--------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF9 ss_addr_sk->[ca_address_sk] ----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF8 +------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF9 ----------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[ss_item_sk] +------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[ss_item_sk];RF8 i_item_sk->[cs_item_sk] --------------------------------------------------PhysicalProject ----------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] ------------------------------------------------------PhysicalProject @@ -68,7 +68,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ------------------------------------------------------------------PhysicalProject --------------------------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF2 cs_item_sk->[ss_item_sk] ----------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 RF4 RF6 RF7 RF10 +------------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 RF4 RF6 RF7 RF11 
----------------------------------------------------------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------------------------------------------------------PhysicalProject --------------------------------------------------------------------------filter((sale > (2 * refund))) @@ -78,7 +78,7 @@ PhysicalCteAnchor ( cteId=CTEId#1 ) ----------------------------------------------------------------------------------PhysicalProject ------------------------------------------------------------------------------------hashJoin[INNER_JOIN] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF0 cr_order_number->[cs_order_number];RF1 cr_item_sk->[cs_item_sk] --------------------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 +----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF8 --------------------------------------------------------------------------------------PhysicalProject ----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_returns] ------------------------------------------------------------------PhysicalDistribute[DistributionSpecReplicated] diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query65.out b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query65.out index 224a3efc9177867..b08b5de695991e1 100644 --- a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query65.out +++ b/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query65.out @@ -5,7 +5,7 @@ PhysicalResultSink ----PhysicalDistribute[DistributionSpecGather] ------PhysicalTopN[LOCAL_SORT] --------PhysicalProject -----------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] +----------hashJoin[INNER_JOIN] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk,ss_store_sk] ------------PhysicalProject --------------hashJoin[INNER_JOIN] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] ----------------PhysicalDistribute[DistributionSpecHash] @@ -32,7 +32,7 @@ PhysicalResultSink ------------------------------------PhysicalProject --------------------------------------hashJoin[INNER_JOIN] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 +------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF4 ----------------------------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------------------------PhysicalProject --------------------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.out index 12ae602cc271849..69ad0ace6592fe1 100644 --- 
a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.out @@ -7,7 +7,7 @@ PhysicalResultSink --------PhysicalProject ----------hashJoin[INNER_JOIN] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] ------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] +--------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey,l_suppkey] ----------------PhysicalDistribute[DistributionSpecHash] ------------------PhysicalProject --------------------hashJoin[INNER_JOIN] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] @@ -17,7 +17,7 @@ PhysicalResultSink ----------------------------hashAgg[LOCAL] ------------------------------PhysicalProject --------------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 +----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ----------------------PhysicalDistribute[DistributionSpecHash] ------------------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] --------------------------PhysicalProject diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.out index 0be36907365f5f8..a8a7aba201253cb 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.out @@ -7,7 +7,7 @@ PhysicalResultSink --------PhysicalProject ----------hashJoin[INNER_JOIN] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] ------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] +--------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey,l_suppkey] ----------------PhysicalDistribute[DistributionSpecHash] ------------------PhysicalProject --------------------hashJoin[INNER_JOIN] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] @@ -16,7 +16,7 @@ PhysicalResultSink --------------------------hashAgg[LOCAL] ----------------------------PhysicalProject ------------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 +--------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ----------------------PhysicalDistribute[DistributionSpecHash] 
------------------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] --------------------------PhysicalProject diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.out index 7a89beaeb271fec..f3053b41c70b75d 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.out @@ -10,7 +10,7 @@ PhysicalResultSink --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF5 r_regionkey->[n_regionkey] ------------------PhysicalProject ---------------------hashJoin[INNER_JOIN] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] +--------------------hashJoin[INNER_JOIN] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey,c_nationkey] ----------------------PhysicalProject ------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_nationkey = supplier.s_nationkey) and (lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey];RF3 s_nationkey->[c_nationkey] --------------------------PhysicalProject @@ -26,7 +26,7 @@ PhysicalResultSink ------------------------------------------PhysicalOlapScan[orders] apply RFs: RF0 ------------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 +----------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 --------------------------PhysicalDistribute[DistributionSpecReplicated] ----------------------------PhysicalProject ------------------------------PhysicalOlapScan[supplier] apply RFs: RF4 diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.out index 9f5eda020127c12..4856ba9c8749e62 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.out @@ -5,7 +5,7 @@ PhysicalResultSink ----PhysicalDistribute[DistributionSpecGather] ------PhysicalQuickSort[LOCAL_SORT] --------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[ps_suppkey] +----------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[ps_suppkey,l_suppkey] ------------PhysicalDistribute[DistributionSpecHash] --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] @@ -15,7 +15,7 @@ PhysicalResultSink ------------------------hashAgg[LOCAL] --------------------------PhysicalProject ----------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 
+------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 ------------------PhysicalDistribute[DistributionSpecHash] --------------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] ----------------------PhysicalProject diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20.out index 9069a99c3a4157d..bbfe15d9102de9e 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20.out @@ -5,7 +5,7 @@ PhysicalResultSink ----PhysicalDistribute[DistributionSpecGather] ------PhysicalQuickSort[LOCAL_SORT] --------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[ps_suppkey] +----------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[ps_suppkey,l_suppkey] ------------PhysicalDistribute[DistributionSpecHash] --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] @@ -14,7 +14,7 @@ PhysicalResultSink ----------------------hashAgg[LOCAL] ------------------------PhysicalProject --------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 +----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 ------------------PhysicalDistribute[DistributionSpecHash] --------------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] ----------------------PhysicalProject diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.out index 9f5eda020127c12..4856ba9c8749e62 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.out @@ -5,7 +5,7 @@ PhysicalResultSink ----PhysicalDistribute[DistributionSpecGather] ------PhysicalQuickSort[LOCAL_SORT] --------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[ps_suppkey] +----------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[ps_suppkey,l_suppkey] ------------PhysicalDistribute[DistributionSpecHash] --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] @@ -15,7 +15,7 @@ PhysicalResultSink ------------------------hashAgg[LOCAL] --------------------------PhysicalProject ----------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) 
-------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 +------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 ------------------PhysicalDistribute[DistributionSpecHash] --------------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] ----------------------PhysicalProject diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20.out index 9069a99c3a4157d..bbfe15d9102de9e 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20.out @@ -5,7 +5,7 @@ PhysicalResultSink ----PhysicalDistribute[DistributionSpecGather] ------PhysicalQuickSort[LOCAL_SORT] --------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[ps_suppkey] +----------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[ps_suppkey,l_suppkey] ------------PhysicalDistribute[DistributionSpecHash] --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] @@ -14,7 +14,7 @@ PhysicalResultSink ----------------------hashAgg[LOCAL] ------------------------PhysicalProject --------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 +----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 ------------------PhysicalDistribute[DistributionSpecHash] --------------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] ----------------------PhysicalProject diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q5.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q5.out index 7145ebda405a2b7..0963b0a556a20e5 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q5.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q5.out @@ -8,7 +8,7 @@ PhysicalResultSink ----------PhysicalDistribute[DistributionSpecHash] ------------hashAgg[LOCAL] --------------PhysicalProject -----------------hashJoin[INNER_JOIN] hashCondition=((customer.c_custkey = orders.o_custkey) and (customer.c_nationkey = supplier.s_nationkey)) otherCondition=() build RFs:RF4 c_nationkey->[s_nationkey];RF5 c_custkey->[o_custkey] +----------------hashJoin[INNER_JOIN] hashCondition=((customer.c_custkey = orders.o_custkey) and (customer.c_nationkey = supplier.s_nationkey)) otherCondition=() build RFs:RF4 c_nationkey->[s_nationkey,n_nationkey];RF5 c_custkey->[o_custkey] ------------------PhysicalDistribute[DistributionSpecHash] --------------------PhysicalProject ----------------------hashJoin[INNER_JOIN] hashCondition=((lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey] @@ -26,7 +26,7 @@ PhysicalResultSink ----------------------------PhysicalDistribute[DistributionSpecReplicated] 
------------------------------hashJoin[INNER_JOIN] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] --------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[nation] apply RFs: RF0 +----------------------------------PhysicalOlapScan[nation] apply RFs: RF0 RF4 --------------------------------PhysicalDistribute[DistributionSpecReplicated] ----------------------------------PhysicalProject ------------------------------------filter((region.r_name = 'ASIA')) diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q9.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q9.out index 3ff1375bf25171b..af4927d5f0a80bf 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q9.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q9.out @@ -8,7 +8,7 @@ PhysicalResultSink ----------PhysicalDistribute[DistributionSpecHash] ------------hashAgg[LOCAL] --------------PhysicalProject -----------------hashJoin[INNER_JOIN] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF4 ps_suppkey->[l_suppkey];RF5 ps_partkey->[l_partkey] +----------------hashJoin[INNER_JOIN] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF4 ps_suppkey->[l_suppkey,s_suppkey];RF5 ps_partkey->[p_partkey,l_partkey] ------------------PhysicalProject --------------------hashJoin[INNER_JOIN] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey] ----------------------PhysicalDistribute[DistributionSpecHash] @@ -24,11 +24,11 @@ PhysicalResultSink --------------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------------PhysicalProject ------------------------------------filter((p_name like '%green%')) ---------------------------------------PhysicalOlapScan[part] +--------------------------------------PhysicalOlapScan[part] apply RFs: RF5 ----------------------PhysicalDistribute[DistributionSpecHash] ------------------------hashJoin[INNER_JOIN] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] --------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF0 +----------------------------PhysicalOlapScan[supplier] apply RFs: RF0 RF4 --------------------------PhysicalDistribute[DistributionSpecReplicated] ----------------------------PhysicalProject ------------------------------PhysicalOlapScan[nation] diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.out index 12ae602cc271849..69ad0ace6592fe1 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.out @@ -7,7 +7,7 @@ PhysicalResultSink --------PhysicalProject ----------hashJoin[INNER_JOIN] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] ------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] +--------------hashJoin[RIGHT_SEMI_JOIN] 
hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey,l_suppkey] ----------------PhysicalDistribute[DistributionSpecHash] ------------------PhysicalProject --------------------hashJoin[INNER_JOIN] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] @@ -17,7 +17,7 @@ PhysicalResultSink ----------------------------hashAgg[LOCAL] ------------------------------PhysicalProject --------------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 +----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ----------------------PhysicalDistribute[DistributionSpecHash] ------------------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] --------------------------PhysicalProject diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.out index 0be36907365f5f8..a8a7aba201253cb 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.out @@ -7,7 +7,7 @@ PhysicalResultSink --------PhysicalProject ----------hashJoin[INNER_JOIN] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] ------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] +--------------hashJoin[RIGHT_SEMI_JOIN] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey,l_suppkey] ----------------PhysicalDistribute[DistributionSpecHash] ------------------PhysicalProject --------------------hashJoin[INNER_JOIN] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] @@ -16,7 +16,7 @@ PhysicalResultSink --------------------------hashAgg[LOCAL] ----------------------------PhysicalProject ------------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 +--------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ----------------------PhysicalDistribute[DistributionSpecHash] ------------------------hashJoin[LEFT_SEMI_JOIN] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] --------------------------PhysicalProject diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.out index 7a89beaeb271fec..f3053b41c70b75d 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.out @@ -10,7 +10,7 @@ PhysicalResultSink --------------PhysicalProject 
----------------hashJoin[INNER_JOIN] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF5 r_regionkey->[n_regionkey] ------------------PhysicalProject ---------------------hashJoin[INNER_JOIN] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] +--------------------hashJoin[INNER_JOIN] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey,c_nationkey] ----------------------PhysicalProject ------------------------hashJoin[INNER_JOIN] hashCondition=((customer.c_nationkey = supplier.s_nationkey) and (lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey];RF3 s_nationkey->[c_nationkey] --------------------------PhysicalProject @@ -26,7 +26,7 @@ PhysicalResultSink ------------------------------------------PhysicalOlapScan[orders] apply RFs: RF0 ------------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 +----------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 --------------------------PhysicalDistribute[DistributionSpecReplicated] ----------------------------PhysicalProject ------------------------------PhysicalOlapScan[supplier] apply RFs: RF4 diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.out b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.out index a8dab44bff2c24d..0710be21e188d77 100644 --- a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.out +++ b/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.out @@ -10,9 +10,9 @@ PhysicalResultSink --------------PhysicalProject ----------------hashJoin[INNER_JOIN] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF5 n_nationkey->[s_nationkey] ------------------PhysicalProject ---------------------hashJoin[INNER_JOIN] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[l_suppkey] +--------------------hashJoin[INNER_JOIN] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[l_suppkey,ps_suppkey] ----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF2 ps_suppkey->[l_suppkey];RF3 ps_partkey->[l_partkey] +------------------------hashJoin[INNER_JOIN] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF2 ps_suppkey->[l_suppkey];RF3 ps_partkey->[p_partkey,l_partkey] --------------------------PhysicalProject ----------------------------hashJoin[INNER_JOIN] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF1 p_partkey->[l_partkey] ------------------------------PhysicalDistribute[DistributionSpecHash] @@ -24,10 +24,10 @@ PhysicalResultSink ------------------------------PhysicalDistribute[DistributionSpecHash] --------------------------------PhysicalProject ----------------------------------filter((p_name like '%green%')) -------------------------------------PhysicalOlapScan[part] +------------------------------------PhysicalOlapScan[part] apply RFs: RF3 
--------------------------PhysicalDistribute[DistributionSpecHash] ----------------------------PhysicalProject -------------------------------PhysicalOlapScan[partsupp] +------------------------------PhysicalOlapScan[partsupp] apply RFs: RF4 ----------------------PhysicalDistribute[DistributionSpecReplicated] ------------------------PhysicalProject --------------------------PhysicalOlapScan[supplier] apply RFs: RF5 diff --git a/regression-test/data/query_p0/sql_functions/cast_function/test_cast_struct.out b/regression-test/data/query_p0/sql_functions/cast_function/test_cast_struct.out index 99933f68e2fd4be..2630debef2d861c 100644 --- a/regression-test/data/query_p0/sql_functions/cast_function/test_cast_struct.out +++ b/regression-test/data/query_p0/sql_functions/cast_function/test_cast_struct.out @@ -39,5 +39,5 @@ {"f1": 1, "f2": "2022-10-10"} -- !sql14 -- -{"f1": 1, "f2": "2022-10-10 00:00:00"} +{"f1": 1.0, "f2": "2022-10-10 00:00:00"} diff --git a/regression-test/data/query_p0/sql_functions/struct_functions/test_struct_functions_by_literal.out b/regression-test/data/query_p0/sql_functions/struct_functions/test_struct_functions_by_literal.out index 59df80eeeda2401..fdfc56edb27e049 100644 --- a/regression-test/data/query_p0/sql_functions/struct_functions/test_struct_functions_by_literal.out +++ b/regression-test/data/query_p0/sql_functions/struct_functions/test_struct_functions_by_literal.out @@ -1,21 +1,21 @@ -- This file is automatically generated. You should know what you did if you want to edit this -- !sql -- -{"1": "a", "2": 1, "3": "doris", "4": "aaaaa", "5": 1.32} +{"col1": "a", "col2": 1, "col3": "doris", "col4": "aaaaa", "col5": 1.32} -- !sql -- -{"1": 1, "2": 2, "3": 3} +{"col1": 1, "col2": 2, "col3": 3} -- !sql -- -{"1": 1, "2": 1000, "3": 10000000000} +{"col1": 1, "col2": 1000, "col3": 10000000000} -- !sql -- -{"1": "a", "2": 1, "3": "doris", "4": "aaaaa", "5": 1.32} +{"col1": "a", "col2": 1, "col3": "doris", "col4": "aaaaa", "col5": 1.32} -- !sql -- -{"1": 1, "2": "a", "3": null} +{"col1": 1, "col2": "a", "col3": null} -- !sql -- -{"1": null, "2": null, "3": null} +{"col1": null, "col2": null, "col3": null} -- !sql -- {"f1": 1, "f2": 2, "f3": 3} diff --git a/regression-test/suites/fault_injection_p0/test_load_stream_fault_injection.groovy b/regression-test/suites/fault_injection_p0/test_load_stream_fault_injection.groovy index f5dcbb1b7e48be0..d5cd8097f964a4f 100644 --- a/regression-test/suites/fault_injection_p0/test_load_stream_fault_injection.groovy +++ b/regression-test/suites/fault_injection_p0/test_load_stream_fault_injection.groovy @@ -125,6 +125,20 @@ suite("load_stream_fault_injection", "nonConcurrent") { } } + def load_with_injection2 = { injection1, injection2, error_msg-> + try { + GetDebugPoint().enableDebugPointForAllBEs(injection1) + GetDebugPoint().enableDebugPointForAllBEs(injection2) + sql "insert into test select * from baseall where k1 <= 3" + } catch(Exception e) { + logger.info(e.getMessage()) + assertTrue(e.getMessage().contains(error_msg)) + } finally { + GetDebugPoint().disableDebugPointForAllBEs(injection1) + GetDebugPoint().disableDebugPointForAllBEs(injection2) + } + } + // LoadStreamWriter create file failed load_with_injection("LocalFileSystem.create_file_impl.open_file_failed", "") // LoadStreamWriter append_data meet null file writer error @@ -161,14 +175,10 @@ suite("load_stream_fault_injection", "nonConcurrent") { load_with_injection("LoadStream._dispatch.unknown_srcid", "") // LoadStream meets StreamRPC idle timeout - 
get_be_param("load_stream_idle_timeout_ms") - set_be_param("load_stream_idle_timeout_ms", 500) try { - load_with_injection("LoadStreamStub._send_with_retry.delay_before_send", "") + load_with_injection2("LoadStreamStub._send_with_retry.delay_before_send", "PInternalServiceImpl.open_load_stream.set_idle_timeout", "") } catch(Exception e) { logger.info(e.getMessage()) - } finally { - reset_be_param("load_stream_idle_timeout_ms") } } diff --git a/regression-test/suites/fault_injection_p0/test_low_wal_disk_space_fault_injection.groovy b/regression-test/suites/fault_injection_p0/test_low_wal_disk_space_fault_injection.groovy new file mode 100644 index 000000000000000..815bafe3d4b0956 --- /dev/null +++ b/regression-test/suites/fault_injection_p0/test_low_wal_disk_space_fault_injection.groovy @@ -0,0 +1,66 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_wal_mem_back_pressure_time_out_fault_injection","nonConcurrent") { + + + def tableName = "wal_test" + sql """ DROP TABLE IF EXISTS ${tableName} """ + + sql """ + CREATE TABLE IF NOT EXISTS ${tableName} ( + `k` int , + `v` int , + ) engine=olap + UNIQUE KEY(k) + DISTRIBUTED BY HASH(`k`) + BUCKETS 32 + properties("replication_num" = "1") + """ + + GetDebugPoint().clearDebugPointsForAllBEs() + + sql """ set group_commit = async_mode; """ + try { + GetDebugPoint().enableDebugPointForAllBEs("GroupCommitBlockSink._add_blocks.return_sync_mode") + def t1 = [] + for (int i = 0; i < 20; i++) { + t1.add(Thread.startDaemon { + streamLoad { + table "${tableName}1" + + set 'column_separator', ',' + set 'compress_type', 'GZ' + set 'format', 'csv' + set 'group_commit', 'async_mode' + unset 'label' + + file 'test_low_wal_disk_space_fault_injection.csv.gz' + time 600000 + } + }) + } + t1.join() + } catch (Exception e) { + logger.info(e.getMessage()) + // make sure there is no exception. 
+ assertFalse(true) + } finally { + GetDebugPoint().disableDebugPointForAllBEs("GroupCommitBlockSink._add_blocks.return_sync_mode") + } + +} \ No newline at end of file diff --git a/regression-test/suites/fault_injection_p0/test_wal_mem_back_pressure_fault_injection.groovy b/regression-test/suites/fault_injection_p0/test_wal_mem_back_pressure_fault_injection.groovy index ea7782591333ace..e87ddf11e05eb48 100644 --- a/regression-test/suites/fault_injection_p0/test_wal_mem_back_pressure_fault_injection.groovy +++ b/regression-test/suites/fault_injection_p0/test_wal_mem_back_pressure_fault_injection.groovy @@ -123,4 +123,35 @@ suite("test_wal_mem_back_pressure_fault_injection","nonConcurrent") { thread1.join() + + // new test + + sql """ DROP TABLE IF EXISTS ${tableName} """ + + sql """ + CREATE TABLE IF NOT EXISTS ${tableName} ( + `k` int , + `v` int , + ) engine=olap + DISTRIBUTED BY HASH(`k`) + BUCKETS 5 + properties("replication_num" = "1") + """ + + GetDebugPoint().clearDebugPointsForAllBEs() + + def exception = false; + sql """ set group_commit = async_mode; """ + try { + GetDebugPoint().enableDebugPointForAllBEs("VWalWriter.write_wal.fail") + sql """insert into ${tableName} values(1,1)""" + assertFalse(true); + } catch (Exception e) { + logger.info(e.getMessage()) + assertTrue(e.getMessage().contains('Failed to write wal!')) + exception = true; + } finally { + GetDebugPoint().disableDebugPointForAllBEs("VWalWriter.write_wal.fail") + assertTrue(exception) + } + } \ No newline at end of file diff --git a/regression-test/suites/insert_p0/test_struct_insert.groovy b/regression-test/suites/insert_p0/test_struct_insert.groovy index f6448d02ae13f74..a845c9785807617 100644 --- a/regression-test/suites/insert_p0/test_struct_insert.groovy +++ b/regression-test/suites/insert_p0/test_struct_insert.groovy @@ -47,22 +47,24 @@ suite("test_struct_insert") { sql "set enable_insert_strict = true" + // TODO reopen these cases after we can process casts correctly in BE and FE + // currently it does the right thing in the wrong way,
because cast varchar in struct is wrong // invalid cases - test { - // k5 is not nullable, can not insert null - sql "insert into ${testTable} values (111,null,null,null,null)" - exception "Insert has filtered data" - } - test { - // size of char type in struct is 10, can not insert string with length more than 10 - sql "insert into ${testTable} values (112,null,null,null,{'1234567890123',null,null})" - exception "Insert has filtered data" - } - test { - // size of varchar type in struct is 10, can not insert string with length more than 10 - sql "insert into ${testTable} values (113,null,null,null,{null,'12345678901234',null})" - exception "Insert has filtered data" - } + // test { + // // k5 is not nullable, can not insert null + // sql "insert into ${testTable} values (111,null,null,null,null)" + // exception "Insert has filtered data" + // } + // test { + // // size of char type in struct is 10, can not insert string with length more than 10 + // sql "insert into ${testTable} values (112,null,null,null,{'1234567890123',null,null})" + // exception "Insert has filtered data" + // } + // test { + // // size of varchar type in struct is 10, can not insert string with length more than 10 + // sql "insert into ${testTable} values (113,null,null,null,{null,'12345678901234',null})" + // exception "Insert has filtered data" + // } // normal cases include nullable and nullable nested fields sql "INSERT INTO ${testTable} VALUES(1, {1,11,111,1111,11111,11111,111111},null,null,{'','',''})" sql "INSERT INTO ${testTable} VALUES(2, {null,null,null,null,null,null,null},{2.1,2.22,2.333},null,{null,null,null})" diff --git a/regression-test/suites/nereids_p0/create_table/test_ctas.groovy b/regression-test/suites/nereids_p0/create_table/test_ctas.groovy index dcbf41139b86bd8..507bff03ea00923 100644 --- a/regression-test/suites/nereids_p0/create_table/test_ctas.groovy +++ b/regression-test/suites/nereids_p0/create_table/test_ctas.groovy @@ -72,8 +72,8 @@ suite("nereids_test_ctas") { ) as select test_varchar, lpad(test_text,10,'0') as test_text, test_datetime, test_default_timestamp from test_ctas; """ - res = sql """SHOW CREATE TABLE `test_ctas2`""" - assertTrue(res.size() != 0) + def res1 = sql """SHOW CREATE TABLE `test_ctas2`""" + assertTrue(res1.size() != 0) qt_select """select count(*) from test_ctas2""" diff --git a/regression-test/suites/nereids_p0/eliminate_outer_join/eliminate_outer_join.groovy b/regression-test/suites/nereids_p0/eliminate_outer_join/eliminate_outer_join.groovy index b66f462facd3698..f63715093870c65 100644 --- a/regression-test/suites/nereids_p0/eliminate_outer_join/eliminate_outer_join.groovy +++ b/regression-test/suites/nereids_p0/eliminate_outer_join/eliminate_outer_join.groovy @@ -22,10 +22,8 @@ suite("eliminate_outer_join") { sql "set disable_join_reorder=true" sql "set forbid_unknown_col_stats=false" sql "set enable_bucket_shuffle_join=false" - sql "set enable_runtime_filter_prune=false" - sql """ - set ignore_shape_nodes='PhysicalDistribute, PhysicalProject' - """ + sql "set runtime_filter_mode=OFF" + sql "set ignore_shape_nodes='PhysicalDistribute, PhysicalProject'" String database = context.config.getDbNameByFile(context.file) sql "drop database if exists ${database}" sql "create database ${database}" diff --git a/regression-test/suites/nereids_p0/expression/case_when_to_if.groovy b/regression-test/suites/nereids_p0/expression/case_when_to_if.groovy index 8c41b8d1c67bff4..c83ce27c017073a 100644 --- a/regression-test/suites/nereids_p0/expression/case_when_to_if.groovy +++ 
b/regression-test/suites/nereids_p0/expression/case_when_to_if.groovy @@ -30,7 +30,7 @@ suite("test_case_when_to_if") { from test_case_when_to_if group by k2; ''' - res = sql ''' + def res = sql ''' explain rewritten plan select k2, sum(case when (k1=1) then 1 end) sum1 from test_case_when_to_if @@ -45,13 +45,13 @@ suite("test_case_when_to_if") { from test_case_when_to_if group by k2; ''' - res = sql ''' + def res1 = sql ''' explain rewritten plan select k2, sum(case when (k1=1) then 1 else null end) sum1 from test_case_when_to_if group by k2; ''' - assertTrue(res.toString().contains("if")) + assertTrue(res1.toString().contains("if")) sql ''' select k2, @@ -59,11 +59,11 @@ suite("test_case_when_to_if") { from test_case_when_to_if group by k2; ''' - res = sql ''' + def res2 = sql ''' explain rewritten plan select k2, sum(case when (k1>0) then k1 else abs(k1) end) sum1 from test_case_when_to_if group by k2; ''' - assertTrue(res.toString().contains("if")) + assertTrue(res2.toString().contains("if")) } diff --git a/regression-test/suites/nereids_p0/expression/topn_to_max.groovy b/regression-test/suites/nereids_p0/expression/topn_to_max.groovy index 4ed378144fac547..83fb9cc84921d87 100644 --- a/regression-test/suites/nereids_p0/expression/topn_to_max.groovy +++ b/regression-test/suites/nereids_p0/expression/topn_to_max.groovy @@ -41,9 +41,9 @@ suite("test_topn_to_max") { select topn(k2, 1) from test_topn_to_max; ''' - res = sql ''' + def res1 = sql ''' explain rewritten plan select topn(k2, 1) from test_topn_to_max; ''' - assertTrue(res.toString().contains("max"), res.toString() + " should contain max") + assertTrue(res1.toString().contains("max"), res1.toString() + " should contain max") } diff --git a/regression-test/suites/nereids_rules_p0/bind_relation/bind_view.groovy b/regression-test/suites/nereids_rules_p0/bind_relation/bind_view.groovy index f3f7641b732dd65..b69fddad1d886f7 100644 --- a/regression-test/suites/nereids_rules_p0/bind_relation/bind_view.groovy +++ b/regression-test/suites/nereids_rules_p0/bind_relation/bind_view.groovy @@ -17,6 +17,7 @@ suite("test_bind_view") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" def table_name = "base_table" diff --git a/regression-test/suites/nereids_rules_p0/eager_aggregate/basic.groovy b/regression-test/suites/nereids_rules_p0/eager_aggregate/basic.groovy index 4380e7f0d546542..afa64135d393ddc 100644 --- a/regression-test/suites/nereids_rules_p0/eager_aggregate/basic.groovy +++ b/regression-test/suites/nereids_rules_p0/eager_aggregate/basic.groovy @@ -17,6 +17,7 @@ suite("eager_aggregate_basic") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git a/regression-test/suites/nereids_rules_p0/eager_aggregate/basic_one_side.groovy b/regression-test/suites/nereids_rules_p0/eager_aggregate/basic_one_side.groovy index 653919e322eb3dd..cb84e0cc1ec6aa6 100644 --- a/regression-test/suites/nereids_rules_p0/eager_aggregate/basic_one_side.groovy +++ b/regression-test/suites/nereids_rules_p0/eager_aggregate/basic_one_side.groovy @@ -17,6 +17,7 @@ suite("eager_aggregate_basic_one_side") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git 
a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_count_through_join.groovy b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_count_through_join.groovy index 1414110374d84d6..f5f4bf53b45236d 100644 --- a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_count_through_join.groovy +++ b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_count_through_join.groovy @@ -17,6 +17,7 @@ suite("push_down_count_through_join") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_count_through_join_one_side.groovy b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_count_through_join_one_side.groovy index 3bf9febfe93e8ed..037368f051f4b2e 100644 --- a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_count_through_join_one_side.groovy +++ b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_count_through_join_one_side.groovy @@ -17,6 +17,7 @@ suite("push_down_count_through_join_one_side") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_max_through_join.groovy b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_max_through_join.groovy index 40cdca48aaa80aa..68d1946b35e7d51 100644 --- a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_max_through_join.groovy +++ b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_max_through_join.groovy @@ -17,6 +17,7 @@ suite("push_down_max_through_join") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_min_through_join.groovy b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_min_through_join.groovy index 906c35e19a772a5..560bf1c0d72d2c5 100644 --- a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_min_through_join.groovy +++ b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_min_through_join.groovy @@ -17,6 +17,7 @@ suite("push_down_min_through_join") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_sum_through_join.groovy b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_sum_through_join.groovy index 4e2a13d644480d8..e51899dcc3d69bf 100644 --- a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_sum_through_join.groovy +++ b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_sum_through_join.groovy @@ -17,6 +17,7 @@ suite("push_down_sum_through_join") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git 
a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_sum_through_join_one_side.groovy b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_sum_through_join_one_side.groovy index 866c755d5b0472f..1ecc6aa48a8bf9e 100644 --- a/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_sum_through_join_one_side.groovy +++ b/regression-test/suites/nereids_rules_p0/eager_aggregate/push_down_sum_through_join_one_side.groovy @@ -17,6 +17,7 @@ suite("push_down_sum_through_join_one_side") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git a/regression-test/suites/nereids_rules_p0/eliminate_join_condition/eliminate_join_condition.groovy b/regression-test/suites/nereids_rules_p0/eliminate_join_condition/eliminate_join_condition.groovy index f6833de6bd573e8..c3f420a7544151b 100644 --- a/regression-test/suites/nereids_rules_p0/eliminate_join_condition/eliminate_join_condition.groovy +++ b/regression-test/suites/nereids_rules_p0/eliminate_join_condition/eliminate_join_condition.groovy @@ -17,6 +17,7 @@ suite("eliminate_join_condition") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET disable_join_reorder=true" diff --git a/regression-test/suites/nereids_rules_p0/eliminate_not_null/eliminate_not_null.groovy b/regression-test/suites/nereids_rules_p0/eliminate_not_null/eliminate_not_null.groovy index cc318a3e854f73d..3a72a6b88b9aebd 100644 --- a/regression-test/suites/nereids_rules_p0/eliminate_not_null/eliminate_not_null.groovy +++ b/regression-test/suites/nereids_rules_p0/eliminate_not_null/eliminate_not_null.groovy @@ -17,6 +17,7 @@ suite("eliminate_not_null") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET disable_join_reorder=true" diff --git a/regression-test/suites/nereids_rules_p0/eliminate_outer_join/eliminate_outer_join.groovy b/regression-test/suites/nereids_rules_p0/eliminate_outer_join/eliminate_outer_join.groovy index b2f5856880dc9dc..40e596ae5a6152b 100644 --- a/regression-test/suites/nereids_rules_p0/eliminate_outer_join/eliminate_outer_join.groovy +++ b/regression-test/suites/nereids_rules_p0/eliminate_outer_join/eliminate_outer_join.groovy @@ -17,6 +17,7 @@ suite("eliminate_outer_join") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """ diff --git a/regression-test/suites/nereids_rules_p0/filter_push_down/filter_push_through_aggregate.groovy b/regression-test/suites/nereids_rules_p0/filter_push_down/filter_push_through_aggregate.groovy index 735aaa939e6364e..0c36def8b97002c 100644 --- a/regression-test/suites/nereids_rules_p0/filter_push_down/filter_push_through_aggregate.groovy +++ b/regression-test/suites/nereids_rules_p0/filter_push_down/filter_push_through_aggregate.groovy @@ -17,6 +17,7 @@ suite("filter_push_through_aggregate") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET disable_join_reorder=true" diff --git 
a/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_alias_through_join.groovy b/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_alias_through_join.groovy index af10e6461e9e4fe..ae005da4678b058 100644 --- a/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_alias_through_join.groovy +++ b/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_alias_through_join.groovy @@ -17,6 +17,7 @@ suite("push_down_alias_through_join") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "use regression_test_nereids_rules_p0" sql "set disable_join_reorder=true" diff --git a/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_expression_in_hash_join.groovy b/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_expression_in_hash_join.groovy index 1dcccd13e1d0c44..b448276e551a91f 100644 --- a/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_expression_in_hash_join.groovy +++ b/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_expression_in_hash_join.groovy @@ -17,6 +17,7 @@ suite("push_down_expression_in_hash_join") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "use regression_test_nereids_rules_p0" sql "set disable_join_reorder=true" diff --git a/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_filter_other_condition.groovy b/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_filter_other_condition.groovy index 42c0003ffee9d1b..b08bdac49af5e55 100644 --- a/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_filter_other_condition.groovy +++ b/regression-test/suites/nereids_rules_p0/filter_push_down/push_down_filter_other_condition.groovy @@ -17,6 +17,7 @@ suite("push_down_filter_other_condition") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "use regression_test_nereids_rules_p0" diff --git a/regression-test/suites/nereids_rules_p0/filter_push_down/push_filter_inside_join.groovy b/regression-test/suites/nereids_rules_p0/filter_push_down/push_filter_inside_join.groovy index 9dfadb9630bacb4..97fdc1c0b4f24ec 100644 --- a/regression-test/suites/nereids_rules_p0/filter_push_down/push_filter_inside_join.groovy +++ b/regression-test/suites/nereids_rules_p0/filter_push_down/push_filter_inside_join.groovy @@ -17,6 +17,7 @@ suite("push_filter_inside_join") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "use regression_test_nereids_rules_p0" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git a/regression-test/suites/nereids_rules_p0/filter_push_down/push_filter_through.groovy b/regression-test/suites/nereids_rules_p0/filter_push_down/push_filter_through.groovy index 4e716dde38023ae..0c9229249b0a3ef 100644 --- a/regression-test/suites/nereids_rules_p0/filter_push_down/push_filter_through.groovy +++ b/regression-test/suites/nereids_rules_p0/filter_push_down/push_filter_through.groovy @@ -17,6 +17,7 @@ suite("push_filter_through") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "use regression_test_nereids_rules_p0" sql "set 
disable_join_reorder=true" diff --git a/regression-test/suites/nereids_rules_p0/infer_set_operator_distinct/infer_set_operator_distinct.groovy b/regression-test/suites/nereids_rules_p0/infer_set_operator_distinct/infer_set_operator_distinct.groovy index 7e27fc9c6dc1f99..eb2be5e16d74175 100644 --- a/regression-test/suites/nereids_rules_p0/infer_set_operator_distinct/infer_set_operator_distinct.groovy +++ b/regression-test/suites/nereids_rules_p0/infer_set_operator_distinct/infer_set_operator_distinct.groovy @@ -17,6 +17,7 @@ suite("test_infer_set_operator_distinct") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """ diff --git a/regression-test/suites/nereids_rules_p0/limit_push_down/limit_push_down.groovy b/regression-test/suites/nereids_rules_p0/limit_push_down/limit_push_down.groovy index 39aa020ee6ac70a..2f1ff4b4ee671a3 100644 --- a/regression-test/suites/nereids_rules_p0/limit_push_down/limit_push_down.groovy +++ b/regression-test/suites/nereids_rules_p0/limit_push_down/limit_push_down.groovy @@ -17,6 +17,7 @@ suite("limit_push_down") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "use regression_test_nereids_rules_p0" sql """ SET inline_cte_referenced_threshold=0 """ diff --git a/regression-test/suites/nereids_rules_p0/limit_push_down/order_push_down.groovy b/regression-test/suites/nereids_rules_p0/limit_push_down/order_push_down.groovy index f6caf529a38750e..dae89145d7a3b22 100644 --- a/regression-test/suites/nereids_rules_p0/limit_push_down/order_push_down.groovy +++ b/regression-test/suites/nereids_rules_p0/limit_push_down/order_push_down.groovy @@ -17,6 +17,7 @@ suite("order_push_down") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "use regression_test_nereids_rules_p0" sql """ SET inline_cte_referenced_threshold=0 """ diff --git a/regression-test/suites/nereids_rules_p0/load.groovy b/regression-test/suites/nereids_rules_p0/load.groovy index 0468e3e1e44c7c2..662c5d7e78066c3 100644 --- a/regression-test/suites/nereids_rules_p0/load.groovy +++ b/regression-test/suites/nereids_rules_p0/load.groovy @@ -17,6 +17,7 @@ suite("load") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """ DROP TABLE IF EXISTS t1 diff --git a/regression-test/suites/nereids_rules_p0/mv/agg_with_roll_up/aggregate_with_roll_up.groovy b/regression-test/suites/nereids_rules_p0/mv/agg_with_roll_up/aggregate_with_roll_up.groovy index 02f628b4f6c61d4..9cdca736fe5238a 100644 --- a/regression-test/suites/nereids_rules_p0/mv/agg_with_roll_up/aggregate_with_roll_up.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/agg_with_roll_up/aggregate_with_roll_up.groovy @@ -19,6 +19,8 @@ suite("aggregate_with_roll_up") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF"; + sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET enable_fallback_to_original_planner=false" sql "SET enable_materialized_view_rewrite=true" sql "SET enable_nereids_timeout = false" diff --git a/regression-test/suites/nereids_rules_p0/mv/agg_without_roll_up/aggregate_without_roll_up.groovy b/regression-test/suites/nereids_rules_p0/mv/agg_without_roll_up/aggregate_without_roll_up.groovy index 
d1d01ebee84e556..be56ebe05f6a765 100644 --- a/regression-test/suites/nereids_rules_p0/mv/agg_without_roll_up/aggregate_without_roll_up.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/agg_without_roll_up/aggregate_without_roll_up.groovy @@ -19,6 +19,8 @@ suite("aggregate_without_roll_up") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF"; + sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET enable_fallback_to_original_planner=false" sql "SET enable_materialized_view_rewrite=true" sql "SET enable_nereids_timeout = false" diff --git a/regression-test/suites/nereids_rules_p0/mv/join/inner/inner_join.groovy b/regression-test/suites/nereids_rules_p0/mv/join/inner/inner_join.groovy index 5fd4124f4d6cbbf..401dbd279a17d1c 100644 --- a/regression-test/suites/nereids_rules_p0/mv/join/inner/inner_join.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/join/inner/inner_join.groovy @@ -19,6 +19,7 @@ suite("inner_join") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET enable_materialized_view_rewrite=true" sql "SET enable_nereids_timeout = false" diff --git a/regression-test/suites/nereids_rules_p0/mv/join/left_outer/outer_join.groovy b/regression-test/suites/nereids_rules_p0/mv/join/left_outer/outer_join.groovy index a9e21f63cf6a858..a407b4cd4724f61 100644 --- a/regression-test/suites/nereids_rules_p0/mv/join/left_outer/outer_join.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/join/left_outer/outer_join.groovy @@ -19,6 +19,8 @@ suite("outer_join") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF"; + sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET enable_fallback_to_original_planner=false" sql "SET enable_materialized_view_rewrite=true" sql "SET enable_nereids_timeout = false" diff --git a/regression-test/suites/nereids_rules_p0/mv/partition_mv_rewrite.groovy b/regression-test/suites/nereids_rules_p0/mv/partition_mv_rewrite.groovy index 428e6a9e1301a1c..e799c01fff96b9d 100644 --- a/regression-test/suites/nereids_rules_p0/mv/partition_mv_rewrite.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/partition_mv_rewrite.groovy @@ -19,6 +19,7 @@ suite("partition_mv_rewrite") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET enable_materialized_view_rewrite=true" sql "SET enable_nereids_timeout = false" diff --git a/regression-test/suites/nereids_rules_p0/partition_prune/test_date_function_prune.groovy b/regression-test/suites/nereids_rules_p0/partition_prune/test_date_function_prune.groovy index 965983596a00cc1..6f5706723167b50 100644 --- a/regression-test/suites/nereids_rules_p0/partition_prune/test_date_function_prune.groovy +++ b/regression-test/suites/nereids_rules_p0/partition_prune/test_date_function_prune.groovy @@ -19,6 +19,8 @@ suite("test_date_function_prune") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF"; + sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET 
enable_fallback_to_original_planner=false" sql "set partition_pruning_expand_threshold=10;" sql "drop table if exists dp" diff --git a/regression-test/suites/nereids_rules_p0/partition_prune/test_multi_range_partition.groovy b/regression-test/suites/nereids_rules_p0/partition_prune/test_multi_range_partition.groovy index 0ee78332a16da9c..23fad332f4c43d1 100644 --- a/regression-test/suites/nereids_rules_p0/partition_prune/test_multi_range_partition.groovy +++ b/regression-test/suites/nereids_rules_p0/partition_prune/test_multi_range_partition.groovy @@ -19,6 +19,7 @@ suite("test_multi_range_partition") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "set partition_pruning_expand_threshold=10;" sql "drop table if exists pt" @@ -296,6 +297,7 @@ suite("test_multi_range_partition") { ); """ sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "insert into tt values (0, 0), (6, 6), (8, 8)" explain { diff --git a/regression-test/suites/nereids_rules_p0/partition_prune/test_partition_unique_model.groovy b/regression-test/suites/nereids_rules_p0/partition_prune/test_partition_unique_model.groovy index cfd4f28e6053e9f..c95d34ffb385077 100644 --- a/regression-test/suites/nereids_rules_p0/partition_prune/test_partition_unique_model.groovy +++ b/regression-test/suites/nereids_rules_p0/partition_prune/test_partition_unique_model.groovy @@ -20,6 +20,7 @@ suite("test_partition_unique_model") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "set partition_pruning_expand_threshold=10;" sql "drop table if exists xinan;" diff --git a/regression-test/suites/nereids_rules_p0/pkfk/eliminate_inner.groovy b/regression-test/suites/nereids_rules_p0/pkfk/eliminate_inner.groovy index 64a530cc225516a..5280847a53a8ef7 100644 --- a/regression-test/suites/nereids_rules_p0/pkfk/eliminate_inner.groovy +++ b/regression-test/suites/nereids_rules_p0/pkfk/eliminate_inner.groovy @@ -17,6 +17,8 @@ suite("eliminate_inner") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF"; + sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute[DistributionSpecGather], PhysicalDistribute[DistributionSpecHash],PhysicalDistribute[DistributionSpecExecutionAny],PhysicalProject'" sql "SET disable_join_reorder=true" diff --git a/regression-test/suites/nereids_rules_p0/predicate_infer/infer_predicate.groovy b/regression-test/suites/nereids_rules_p0/predicate_infer/infer_predicate.groovy index e2550d73b9306f3..b9bdbdf3e7b868f 100644 --- a/regression-test/suites/nereids_rules_p0/predicate_infer/infer_predicate.groovy +++ b/regression-test/suites/nereids_rules_p0/predicate_infer/infer_predicate.groovy @@ -17,6 +17,7 @@ suite("infer_predicate") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET disable_join_reorder=true" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git a/regression-test/suites/nereids_rules_p0/push_down_limit_distinct/push_down_limit_distinct.groovy 
b/regression-test/suites/nereids_rules_p0/push_down_limit_distinct/push_down_limit_distinct.groovy index d63fed508785264..f333cbee8962b80 100644 --- a/regression-test/suites/nereids_rules_p0/push_down_limit_distinct/push_down_limit_distinct.groovy +++ b/regression-test/suites/nereids_rules_p0/push_down_limit_distinct/push_down_limit_distinct.groovy @@ -17,6 +17,7 @@ suite("push_down_limit_distinct_through_join") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET disable_join_reorder=true" diff --git a/regression-test/suites/nereids_rules_p0/push_down_top_n/push_down_top_n_distinct_through_union.groovy b/regression-test/suites/nereids_rules_p0/push_down_top_n/push_down_top_n_distinct_through_union.groovy index c7a56da147f642e..359846b2f8929ea 100644 --- a/regression-test/suites/nereids_rules_p0/push_down_top_n/push_down_top_n_distinct_through_union.groovy +++ b/regression-test/suites/nereids_rules_p0/push_down_top_n/push_down_top_n_distinct_through_union.groovy @@ -17,6 +17,7 @@ suite("push_down_top_n_distinct_through_union") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET disable_join_reorder=true" diff --git a/regression-test/suites/nereids_rules_p0/push_down_top_n/push_down_top_n_through_union.groovy b/regression-test/suites/nereids_rules_p0/push_down_top_n/push_down_top_n_through_union.groovy index 974c9ab3ba1923d..acf96e3263610e6 100644 --- a/regression-test/suites/nereids_rules_p0/push_down_top_n/push_down_top_n_through_union.groovy +++ b/regression-test/suites/nereids_rules_p0/push_down_top_n/push_down_top_n_through_union.groovy @@ -17,6 +17,7 @@ suite("push_down_top_n_through_union") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET disable_join_reorder=true" diff --git a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_and_subquery.groovy b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_and_subquery.groovy index b78c6e21f7fbbe6..4cd48f1656eba0c 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_and_subquery.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_and_subquery.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_and_subquery") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_basic.groovy b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_basic.groovy index 8621440a529c36c..024e257316aafad 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_basic.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_basic.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_basic") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git 
a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_or.groovy b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_or.groovy index 6b3cf46e484254c..92f10ef7a7daad5 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_or.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_or.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_or") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_or_subquery.groovy b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_or_subquery.groovy index 76f2b7ce7abd134..2eff98be9dde201 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_or_subquery.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_or_subquery.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_or_subquery") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_uk.groovy b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_uk.groovy index c3eeb0f090d42bb..8cedc143bc2e17e 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_uk.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_pullup_uk.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_uk") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_ullup_and.groovy b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_ullup_and.groovy index f054e58fa6f740d..80bbac5a7416e6e 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_ullup_and.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/basic/subquery_basic_ullup_and.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_and") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/misc/subquery_misc_pullup_dml.groovy b/regression-test/suites/nereids_rules_p0/subquery/misc/subquery_misc_pullup_dml.groovy index faf8ca9102738ed..f96386b6d73ad30 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/misc/subquery_misc_pullup_dml.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/misc/subquery_misc_pullup_dml.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_dml") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/misc/subquery_misc_pullup_misc.groovy b/regression-test/suites/nereids_rules_p0/subquery/misc/subquery_misc_pullup_misc.groovy index 0602210f3918731..2c3bf456e6a70ea 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/misc/subquery_misc_pullup_misc.groovy +++ 
b/regression-test/suites/nereids_rules_p0/subquery/misc/subquery_misc_pullup_misc.groovy @@ -17,6 +17,7 @@ suite("subquery_misc_pullup_misc") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_and.groovy b/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_and.groovy index 51dfa5022323d32..d4d63959278be25 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_and.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_and.groovy @@ -17,6 +17,7 @@ suite("subquery_multitable_pullup_and") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_and_subquery.groovy b/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_and_subquery.groovy index 5c1318f8c9f3e06..a73f23926ffa42c 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_and_subquery.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_and_subquery.groovy @@ -17,6 +17,7 @@ suite("subquery_multitable_pullup_and_subquery") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_basic.groovy b/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_basic.groovy index 5e9df4ffc796b1b..f07298ebf8bb5d1 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_basic.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_basic.groovy @@ -17,6 +17,7 @@ suite("subquery_multitable_pullup_basic") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_or.groovy b/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_or.groovy index 67168365f014837..7d9f591ac1855c5 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_or.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_or.groovy @@ -17,6 +17,7 @@ suite("subquery_multitable_pullup_or") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_or_subquery.groovy b/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_or_subquery.groovy index 8a41ecd25109c5a..16fd8e0578a5b27 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_or_subquery.groovy +++ 
b/regression-test/suites/nereids_rules_p0/subquery/multitable/subquery_multitable_pullup_or_subquery.groovy @@ -17,6 +17,7 @@ suite("subquery_multitable_pullup_or_subquery") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_groupby.groovy b/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_groupby.groovy index 3323caa364cfde5..715b4ada3c3d79a 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_groupby.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_groupby.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_orderby") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_having.groovy b/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_having.groovy index 63d58b1ccb2a7a2..95afad3a66a1759 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_having.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_having.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_having") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_orderby.groovy b/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_orderby.groovy index 97a8227dd81535a..1c18975c0da95cb 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_orderby.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_orderby.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_orderby") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_selectlist.groovy b/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_selectlist.groovy index 9ab597472106ea6..e43b406560699c2 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_selectlist.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_selectlist.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_selectlist") { sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_winfunc.groovy b/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_winfunc.groovy index d8e7e65ac8c55d8..a2a3466d1834371 100644 --- a/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_winfunc.groovy +++ b/regression-test/suites/nereids_rules_p0/subquery/topop/subquery_topop_pullup_winfunc.groovy @@ -17,6 +17,7 @@ suite("subquery_basic_pullup_winfunc") { sql "SET enable_nereids_planner=true" + sql "set 
runtime_filter_mode=OFF" sql "SET enable_fallback_to_original_planner=false" sql """DROP TABLE IF EXISTS t1;""" diff --git a/regression-test/suites/nereids_rules_p0/transposeJoin/transposeSemiJoinAgg.groovy b/regression-test/suites/nereids_rules_p0/transposeJoin/transposeSemiJoinAgg.groovy index 5a212727f7d7978..c3784219f79d03a 100644 --- a/regression-test/suites/nereids_rules_p0/transposeJoin/transposeSemiJoinAgg.groovy +++ b/regression-test/suites/nereids_rules_p0/transposeJoin/transposeSemiJoinAgg.groovy @@ -20,6 +20,8 @@ suite("transposeSemiJoinAgg") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" sql "SET enable_nereids_planner=true" + sql "set runtime_filter_mode=OFF"; + sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" sql "SET enable_fallback_to_original_planner=false" sql "set partition_pruning_expand_threshold=10;" sql "set ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/shape.tmpl b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/shape.tmpl index a4bd931311e5474..24519d50e49f401 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/shape.tmpl +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/shape.tmpl @@ -29,6 +29,7 @@ suite("query{--}") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """{query}""" qt_ds_shape_{--} ''' diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query1.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query1.groovy index f9e6a330af69050..26e49347783e1a5 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query1.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query1.groovy @@ -29,6 +29,7 @@ suite("query1") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with customer_total_return as (select sr_customer_sk as ctr_customer_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query10.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query10.groovy index accc3fe027f109c..03b1d2bca944b3f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query10.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query10.groovy @@ -29,6 +29,7 @@ suite("query10") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select cd_gender, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query11.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query11.groovy index 1696ab1f54c0b12..f7bcf22622a1db7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query11.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query11.groovy @@ -29,6 +29,7 @@ suite("query11") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with year_total as ( select c_customer_id customer_id diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query12.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query12.groovy index 3e38e7b31eb3461..4e512d43c281da2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query12.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query12.groovy @@ -29,6 +29,7 @@ suite("query12") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id ,i_item_desc diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query13.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query13.groovy index f4cd48bd0b29a6e..2f3d00c324c7618 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query13.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query13.groovy @@ -29,6 +29,7 @@ suite("query13") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select avg(ss_quantity) ,avg(ss_ext_sales_price) diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query14.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query14.groovy index ea76870e685cd60..47a7ee463aab3bb 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query14.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query14.groovy @@ -29,6 +29,7 @@ suite("query14") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with cross_items as (select i_item_sk ss_item_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query15.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query15.groovy index d0cf611893591bc..91b7e5a036c810b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query15.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query15.groovy @@ -29,6 +29,7 @@ suite("query15") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select ca_zip ,sum(cs_sales_price) diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query16.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query16.groovy index a4af3ebcf3d511f..0ae40d15fc16070 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query16.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query16.groovy @@ -29,6 +29,7 @@ suite("query16") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select count(distinct cs_order_number) as "order count" diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query17.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query17.groovy index a3457b502004260..3f82efd83d8249d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query17.groovy +++ 
b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query17.groovy @@ -29,6 +29,7 @@ suite("query17") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id ,i_item_desc diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query18.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query18.groovy index 860c3ff3a59c3a7..4279a94780a7dff 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query18.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query18.groovy @@ -29,6 +29,7 @@ suite("query18") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id, ca_country, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query19.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query19.groovy index a74d0593dec5475..dc5714b7b39d7f8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query19.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query19.groovy @@ -29,6 +29,7 @@ suite("query19") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, sum(ss_ext_sales_price) ext_price diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query2.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query2.groovy index c55063a80eed8b4..f9897201e8a0008 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query2.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query2.groovy @@ -29,6 +29,7 @@ suite("query2") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with wscs as (select sold_date_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query20.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query20.groovy index 96846c9204d062f..6db88b75dd9ec74 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query20.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query20.groovy @@ -29,6 +29,7 @@ suite("query20") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id ,i_item_desc diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query21.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query21.groovy index 6641a42d9ce42e0..bf872a39e4e07e8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query21.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query21.groovy @@ -29,6 +29,7 @@ suite("query21") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set 
dump_nereids_memo=true' def ds = """select * from(select w_warehouse_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query22.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query22.groovy index 8d965c3cf7b38c1..a5acdc2d78a4054 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query22.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query22.groovy @@ -29,6 +29,7 @@ suite("query22") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_product_name ,i_brand diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query23.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query23.groovy index d023e6e96bc616a..3d3b936bfabf8df 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query23.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query23.groovy @@ -29,6 +29,7 @@ suite("query23") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with frequent_ss_items as (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query24.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query24.groovy index a981616bf09095b..42f2f0d839450c9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query24.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query24.groovy @@ -29,6 +29,7 @@ suite("query24") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ssales as (select c_last_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query25.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query25.groovy index 04894128fc7a848..49eaec755597cf0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query25.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query25.groovy @@ -29,6 +29,7 @@ suite("query25") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query26.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query26.groovy index 455d18fdd512ee0..ef1f6fb222c69f3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query26.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query26.groovy @@ -29,6 +29,7 @@ suite("query26") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id, avg(cs_quantity) agg1, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query27.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query27.groovy index 
0ef65b344941dc8..b67c90cdd92564e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query27.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query27.groovy @@ -29,6 +29,7 @@ suite("query27") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id, s_state, grouping(s_state) g_state, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query28.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query28.groovy index 037f372448a4972..38c60856ea0241b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query28.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query28.groovy @@ -29,6 +29,7 @@ suite("query28") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select * from (select avg(ss_list_price) B1_LP diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query29.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query29.groovy index 32dcc775f069c78..32777541771fa54 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query29.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query29.groovy @@ -29,6 +29,7 @@ suite("query29") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query3.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query3.groovy index f537961471ae14c..4ccdbce73bb7f55 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query3.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query3.groovy @@ -29,6 +29,7 @@ suite("query3") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select dt.d_year ,item.i_brand_id brand_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query30.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query30.groovy index 287051b03c1e112..a1277ccb82b29a8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query30.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query30.groovy @@ -29,6 +29,7 @@ suite("query30") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with customer_total_return as (select wr_returning_customer_sk as ctr_customer_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query31.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query31.groovy index 05a4f307461326b..9114e7c747b850c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query31.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query31.groovy @@ -29,6 +29,7 @@ suite("query31") { sql 'set 
forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ss as (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query32.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query32.groovy index 2b7bb4c396be390..3a1c6518af7b770 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query32.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query32.groovy @@ -29,6 +29,7 @@ suite("query32") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select sum(cs_ext_discount_amt) as "excess discount amount" from diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query33.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query33.groovy index 9b802bed918ab9d..a7ab465ac5c2404 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query33.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query33.groovy @@ -29,6 +29,7 @@ suite("query33") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ss as ( select diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query34.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query34.groovy index e3791dce0b241d3..26de532c56a7600 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query34.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query34.groovy @@ -29,6 +29,7 @@ suite("query34") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select c_last_name ,c_first_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query35.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query35.groovy index 89aee2bebcd5fe5..afae0a31fb88a8c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query35.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query35.groovy @@ -29,6 +29,7 @@ suite("query35") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select ca_state, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query36.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query36.groovy index 2019b801153a404..2cc21b564db50ad 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query36.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query36.groovy @@ -29,6 +29,7 @@ suite("query36") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query37.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query37.groovy index 923d22ee914fa63..6e93b1ae1a463e3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query37.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query37.groovy @@ -29,6 +29,7 @@ suite("query37") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id ,i_item_desc diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query38.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query38.groovy index c2f269bffa0f8b2..63951c511040ab2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query38.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query38.groovy @@ -29,6 +29,7 @@ suite("query38") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select count(*) from ( select distinct c_last_name, c_first_name, d_date diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query39.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query39.groovy index 4991d68aa416291..cc3130829bd417f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query39.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query39.groovy @@ -29,6 +29,7 @@ suite("query39") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with inv as (select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query4.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query4.groovy index 3aeb28dcaa3f315..363b6019b73baf3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query4.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query4.groovy @@ -29,6 +29,7 @@ suite("query4") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with year_total as ( select c_customer_id customer_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query40.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query40.groovy index e89ddadcfbd7980..bf3c8bcba57348f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query40.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query40.groovy @@ -29,6 +29,7 @@ suite("query40") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select w_state diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query41.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query41.groovy index 6d7b53732f0cb18..2da38b937eea389 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query41.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query41.groovy @@ -29,6 +29,7 @@ suite("query41") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select distinct(i_product_name) from item i1 diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query42.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query42.groovy index b24f4c4f500e0d3..2e742a6d5d797fb 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query42.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query42.groovy @@ -29,6 +29,7 @@ suite("query42") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select dt.d_year ,item.i_category_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query43.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query43.groovy index 843549d2e3c53cb..895117ba234db8c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query43.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query43.groovy @@ -29,6 +29,7 @@ suite("query43") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select s_store_name, s_store_id, sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query44.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query44.groovy index e7a854350484868..ee63467db29c8c9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query44.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query44.groovy @@ -29,6 +29,7 @@ suite("query44") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing from(select * diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query45.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query45.groovy index 0830949b53883df..2dc555b0e742c19 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query45.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query45.groovy @@ -29,6 +29,7 @@ suite("query45") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select ca_zip, ca_city, sum(ws_sales_price) from web_sales, customer, customer_address, date_dim, item diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query46.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query46.groovy index 9b71e7de8c394cd..6352c0cd44d54f6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query46.groovy +++ 
b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query46.groovy @@ -29,6 +29,7 @@ suite("query46") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select c_last_name ,c_first_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query47.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query47.groovy index f95d5fcda767228..65bdaa77b41bff6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query47.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query47.groovy @@ -29,6 +29,7 @@ suite("query47") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with v1 as( select i_category, i_brand, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query48.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query48.groovy index 25fc045529bd637..29aa2dd0ddfbb81 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query48.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query48.groovy @@ -29,6 +29,7 @@ suite("query48") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select sum (ss_quantity) from store_sales, store, customer_demographics, customer_address, date_dim diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query49.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query49.groovy index 140ef3fbbe1fe8a..48a8ce0740ca9ef 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query49.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query49.groovy @@ -29,6 +29,7 @@ suite("query49") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select channel, item, return_ratio, return_rank, currency_rank from (select diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query5.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query5.groovy index a06402e87b8571c..230a1f766b85164 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query5.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query5.groovy @@ -29,6 +29,7 @@ suite("query5") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ssr as (select s_store_id, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query50.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query50.groovy index a29671586b067f5..8518305731a07ee 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query50.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query50.groovy @@ -29,6 +29,7 @@ suite("query50") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set 
enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select s_store_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query51.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query51.groovy index f940eef65583fa8..e5c49b81df51a7a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query51.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query51.groovy @@ -29,6 +29,7 @@ suite("query51") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """WITH web_v1 as ( select diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query52.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query52.groovy index 8e7ce9d52813715..747015b13901b97 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query52.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query52.groovy @@ -29,6 +29,7 @@ suite("query52") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select dt.d_year ,item.i_brand_id brand_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query53.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query53.groovy index c0e0355b2f8079a..44f6b9298c85d20 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query53.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query53.groovy @@ -29,6 +29,7 @@ suite("query53") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select * from (select i_manufact_id, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query54.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query54.groovy index f5899a0f10044d1..3d5cb32046c5023 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query54.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query54.groovy @@ -29,6 +29,7 @@ suite("query54") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with my_customers as ( select distinct c_customer_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query55.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query55.groovy index 683180f5fc99cde..17969c92cb8031c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query55.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query55.groovy @@ -29,6 +29,7 @@ suite("query55") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_brand_id brand_id, i_brand brand, sum(ss_ext_sales_price) ext_price diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query56.groovy 
b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query56.groovy index f3451694dbbeabc..383cafa5e022756 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query56.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query56.groovy @@ -29,6 +29,7 @@ suite("query56") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ss as ( select i_item_id,sum(ss_ext_sales_price) total_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query57.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query57.groovy index 0bc931de71e1658..e3e19c808fe21ec 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query57.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query57.groovy @@ -29,6 +29,7 @@ suite("query57") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with v1 as( select i_category, i_brand, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query58.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query58.groovy index b227b5d1f491ceb..3539d9544b31046 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query58.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query58.groovy @@ -29,6 +29,7 @@ suite("query58") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ss_items as (select i_item_id item_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query59.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query59.groovy index 1281bf8cdf4ce94..47bff370156aa12 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query59.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query59.groovy @@ -29,6 +29,7 @@ suite("query59") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with wss as (select d_week_seq, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query6.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query6.groovy index dd2dcab7ec144a7..4fb62187bcae209 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query6.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query6.groovy @@ -29,6 +29,7 @@ suite("query6") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select a.ca_state state, count(*) cnt from customer_address a diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query60.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query60.groovy index 5f0b8468ef71924..461e65ed723921e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query60.groovy +++ 
b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query60.groovy @@ -29,6 +29,7 @@ suite("query60") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ss as ( select diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query61.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query61.groovy index b722823808d88a6..7ed7c6a5258fa34 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query61.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query61.groovy @@ -29,6 +29,7 @@ suite("query61") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 from diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query62.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query62.groovy index 1d56afef5db9bf0..49aa2868657af0b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query62.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query62.groovy @@ -29,6 +29,7 @@ suite("query62") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select substr(w_warehouse_name,1,20) diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query63.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query63.groovy index 1e81e214b37d840..621d5f890846da4 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query63.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query63.groovy @@ -29,6 +29,7 @@ suite("query63") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select * from (select i_manager_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query64.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query64.groovy index beef03a1ffe2c72..55190653ec09282 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query64.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query64.groovy @@ -29,6 +29,7 @@ suite("query64") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with cs_ui as (select cs_item_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query65.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query65.groovy index ebb95d12bfbee14..ebbeea8abd12213 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query65.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query65.groovy @@ -29,6 +29,7 @@ suite("query65") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set 
dump_nereids_memo=true' def ds = """select s_store_name, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query66.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query66.groovy index 3c64236ca495226..5fe60e2cab3e2be 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query66.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query66.groovy @@ -29,6 +29,7 @@ suite("query66") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select w_warehouse_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query67.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query67.groovy index 623de6fdec332cf..b770846d3edc496 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query67.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query67.groovy @@ -29,6 +29,7 @@ suite("query67") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select * from (select i_category diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query68.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query68.groovy index 4cd22e62b2e0dc9..0d6f6e95e2253c7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query68.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query68.groovy @@ -29,6 +29,7 @@ suite("query68") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select c_last_name ,c_first_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query69.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query69.groovy index 83c72feeac6ab30..e5566c3a9bee3c7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query69.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query69.groovy @@ -29,6 +29,7 @@ suite("query69") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select cd_gender, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query7.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query7.groovy index 746679343fc82e6..8b7fd9175f2b6b7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query7.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query7.groovy @@ -29,6 +29,7 @@ suite("query7") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id, avg(ss_quantity) agg1, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query70.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query70.groovy index 149fa5aa6425d16..1cf13e956b811b7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query70.groovy +++ 
b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query70.groovy @@ -29,6 +29,7 @@ suite("query70") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select sum(ss_net_profit) as total_sum diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query71.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query71.groovy index 667a9a5a5ff1b94..10f7a8be9c9a296 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query71.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query71.groovy @@ -29,6 +29,7 @@ suite("query71") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_brand_id brand_id, i_brand brand,t_hour,t_minute, sum(ext_price) ext_price diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query72.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query72.groovy index a01c328adb39cbd..4f9a018e2e093b3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query72.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query72.groovy @@ -29,6 +29,7 @@ suite("query72") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_desc ,w_warehouse_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query73.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query73.groovy index 525a259de0b45f2..55448730834b2ac 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query73.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query73.groovy @@ -29,6 +29,7 @@ suite("query73") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select c_last_name ,c_first_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query74.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query74.groovy index e6db1db2239f745..f42dc7c6a45cba6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query74.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query74.groovy @@ -29,6 +29,7 @@ suite("query74") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with year_total as ( select c_customer_id customer_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query75.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query75.groovy index d38b46371ff1c82..9eeb254104cd4c7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query75.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query75.groovy @@ -29,6 +29,7 @@ suite("query75") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set 
runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """WITH all_sales AS ( SELECT d_year diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query76.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query76.groovy index c4ad3be83924bbc..bf5f4c8d97efd1a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query76.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query76.groovy @@ -29,6 +29,7 @@ suite("query76") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( SELECT 'store' as channel, 'ss_customer_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query77.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query77.groovy index d45e6bb1545ab95..45693e6f1b48462 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query77.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query77.groovy @@ -29,6 +29,7 @@ suite("query77") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ss as (select s_store_sk, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query78.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query78.groovy index adc3ae2027eb3a5..7a8b57e7679a3e8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query78.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query78.groovy @@ -29,6 +29,7 @@ suite("query78") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ws as (select d_year AS ws_sold_year, ws_item_sk, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query79.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query79.groovy index 0b3679060722c22..fde72e0859f4a6f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query79.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query79.groovy @@ -29,6 +29,7 @@ suite("query79") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query8.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query8.groovy index 301736c4dfa277a..e8f8e6f4b03d7e2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query8.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query8.groovy @@ -29,6 +29,7 @@ suite("query8") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select 
s_store_name ,sum(ss_net_profit) diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query80.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query80.groovy index 4a78451d5ece76c..64e7f913ed96c0e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query80.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query80.groovy @@ -29,6 +29,7 @@ suite("query80") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ssr as (select s_store_id as store_id, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query81.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query81.groovy index 7f24cbe89b5648f..f058cff75ef5309 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query81.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query81.groovy @@ -29,6 +29,7 @@ suite("query81") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with customer_total_return as (select cr_returning_customer_sk as ctr_customer_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query82.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query82.groovy index b662f9805954259..49ca2c51e3e2a17 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query82.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query82.groovy @@ -29,6 +29,7 @@ suite("query82") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id ,i_item_desc diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query83.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query83.groovy index 570a2a769308fec..2f0de7342bef94c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query83.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query83.groovy @@ -29,6 +29,7 @@ suite("query83") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with sr_items as (select i_item_id item_id, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query84.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query84.groovy index 91ededbaddd0d4b..a69dd44fb37057b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query84.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query84.groovy @@ -29,6 +29,7 @@ suite("query84") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select c_customer_id as customer_id , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query85.groovy 
b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query85.groovy index 7ecc82369a88b37..3ef45ade8b73a38 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query85.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query85.groovy @@ -29,6 +29,7 @@ suite("query85") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select substr(r_reason_desc,1,20) ,avg(ws_quantity) diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query86.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query86.groovy index 57557c327d49051..6803b4b594de51c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query86.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query86.groovy @@ -29,6 +29,7 @@ suite("query86") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select sum(ws_net_paid) as total_sum diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query87.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query87.groovy index a4abcae1a76a8a9..6a7e665ea4ace77 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query87.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query87.groovy @@ -29,6 +29,7 @@ suite("query87") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select count(*) from ((select distinct c_last_name, c_first_name, d_date diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query88.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query88.groovy index 14a8ae12c394c6c..401af85bd6632e3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query88.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query88.groovy @@ -29,6 +29,7 @@ suite("query88") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select * from diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query89.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query89.groovy index 65a2b80a3d98b47..8e654061dce5aa9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query89.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query89.groovy @@ -29,6 +29,7 @@ suite("query89") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select * from( diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query9.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query9.groovy index 6e0edf4376c8e9d..8110bc942b674e3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query9.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query9.groovy @@ -29,6 +29,7 @@ suite("query9") { 
sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select case when (select count(*) from store_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query90.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query90.groovy index 36f7a904282cf47..1a6743a7d6dd85e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query90.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query90.groovy @@ -29,6 +29,7 @@ suite("query90") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio from ( select count(*) amc diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query91.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query91.groovy index 2e56a09692553d1..fe2e497bc278a74 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query91.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query91.groovy @@ -29,6 +29,7 @@ suite("query91") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select cc_call_center_id Call_Center, diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query92.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query92.groovy index febc29b3756fc44..181cdfb32a239be 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query92.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query92.groovy @@ -29,6 +29,7 @@ suite("query92") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select sum(ws_ext_discount_amt) as "Excess Discount Amount" diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query93.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query93.groovy index 6254c7a976d5e27..c03b90460747f0c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query93.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query93.groovy @@ -29,6 +29,7 @@ suite("query93") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select ss_customer_sk ,sum(act_sales) sumsales diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query94.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query94.groovy index a8b90773d9fce6e..bbbdca8c7e21ac5 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query94.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query94.groovy @@ -29,6 +29,7 @@ suite("query94") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = 
"""select count(distinct ws_order_number) as "order count" diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query95.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query95.groovy index 144c9444db5d5de..eda70ba6edc353d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query95.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query95.groovy @@ -29,6 +29,7 @@ suite("query95") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ws_wh as (select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query96.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query96.groovy index 1093559e5fa09ec..b296603c7927599 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query96.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query96.groovy @@ -29,6 +29,7 @@ suite("query96") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select count(*) from store_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query97.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query97.groovy index dec8ba4fd83e3bd..93b3e29d906b123 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query97.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query97.groovy @@ -29,6 +29,7 @@ suite("query97") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """with ssci as ( select ss_customer_sk customer_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query98.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query98.groovy index 9a9ef4e9af53fe2..118b1fd6cc996f7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query98.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query98.groovy @@ -29,6 +29,7 @@ suite("query98") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select i_item_id ,i_item_desc diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query99.groovy b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query99.groovy index d1e5adf7746a1cf..e560299d8a21394 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query99.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query99.groovy @@ -29,6 +29,7 @@ suite("query99") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' sql 'set dump_nereids_memo=true' def ds = """select substr(w_warehouse_name,1,20) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/rf_prune.tmpl b/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/rf_prune.tmpl index ef0261d4f74899a..01feb595aa451ff 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/rf_prune.tmpl +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/rf_prune.tmpl @@ -28,6 +28,7 @@ suite("query{--}") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """{query}""" diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/shape.tmpl b/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/shape.tmpl index dee3972b8bd1cd0..438826daf102b57 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/shape.tmpl +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/shape.tmpl @@ -29,6 +29,7 @@ suite("query{--}") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """{query}""" qt_ds_shape_{--} """ explain shape plan diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.groovy index 0e1e2bd9427e7e5..d989f204ea69504 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.groovy @@ -27,7 +27,8 @@ suite("query1") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.groovy index 65620ac5560410a..05087fc8546ee14 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.groovy @@ -27,7 +27,8 @@ suite("query10") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.groovy index 025098273720a4c..b3b183f53e96384 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.groovy @@ -27,7 +27,8 @@ suite("query11") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.groovy index 
c34f2376c435f6e..f49923b289ad69c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.groovy @@ -27,7 +27,8 @@ suite("query12") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.groovy index e14fd1889858c81..34929763812a00c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.groovy @@ -27,7 +27,8 @@ suite("query13") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.groovy index 8719f4f3243ef9e..125bd6ce9bcfe08 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.groovy @@ -27,7 +27,8 @@ suite("query14") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.groovy index 4ff03f461debe54..33ac6f6da1f2893 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.groovy @@ -27,7 +27,8 @@ suite("query15") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.groovy index 1daee4c033a710c..50797aa4a9f0116 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.groovy @@ -27,7 +27,8 @@ suite("query16") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set 
enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.groovy index 0d5f654130ca89b..ddc15d7fb35bbbc 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.groovy @@ -27,7 +27,8 @@ suite("query17") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.groovy index 6c8e4410d7695ce..26f3240dc447314 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.groovy @@ -27,7 +27,8 @@ suite("query18") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.groovy index 9b2262f5b913197..6535cac97cadf6b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.groovy @@ -27,7 +27,8 @@ suite("query19") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.groovy index 531f68d94e44250..d44fec3d54d68c0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.groovy @@ -27,7 +27,8 @@ suite("query2") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.groovy index 989c07db0f1d6af..e2e001df12eb6bc 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.groovy @@ -27,7 +27,8 @@ suite("query20") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.groovy index a7d7efc1d55a1f6..a65a00bd684e206 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.groovy @@ -27,7 +27,8 @@ suite("query21") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.groovy index 81bdc718fa68e0b..fdfbfc5d6c63872 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.groovy @@ -27,7 +27,8 @@ suite("query22") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.groovy index b3af9ffc77a1a2f..a682d91fb78e9e8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.groovy @@ -27,7 +27,8 @@ suite("query23") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.groovy index eb2119f8f4e0f8e..2c5be13a651c28e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.groovy @@ -27,7 +27,8 @@ suite("query24") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.groovy index 350226d8965a8da..d86d4b7701e1422 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.groovy @@ -27,7 +27,8 @@ suite("query25") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.groovy index bb5a94bc839b38e..e2df053da4bc507 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.groovy @@ -27,7 +27,8 @@ suite("query26") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.groovy index c95e645ac619499..9d24d419cc19ede 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.groovy @@ -27,7 +27,8 @@ suite("query27") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.groovy index ecede28c79f1b0a..747d17c61016df9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.groovy @@ -27,7 +27,8 @@ suite("query28") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.groovy index 32f92522aee3cb6..7337427803a1eb8 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.groovy @@ -27,7 +27,8 @@ suite("query29") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.groovy index 606ae457a44839e..82e4cea197a744e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.groovy @@ -27,7 +27,8 @@ suite("query3") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.groovy index 3d1c5bc00aebaa7..fe2f5906791efeb 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.groovy @@ -27,7 +27,8 @@ suite("query30") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.groovy index 64d646f725a842e..848f18e06677ea0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.groovy @@ -27,7 +27,8 @@ suite("query31") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.groovy index 4e59ee5672af905..1d3d4b511cb4b99 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.groovy @@ -27,7 +27,8 @@ suite("query32") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.groovy index 205e03b446c8e82..61713e91c7ed3ab 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.groovy @@ -27,7 +27,8 @@ suite("query33") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.groovy index 900e8838ec13ff5..a829351b0ab3969 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.groovy @@ -27,7 +27,8 @@ suite("query34") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.groovy index 45fe68795bfd750..075c9e1b1a58dbd 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.groovy @@ -27,7 +27,8 @@ suite("query35") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.groovy index a8811ca322e1f2f..c03319d0754a32f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.groovy @@ -27,7 +27,8 @@ suite("query36") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.groovy index dea9c6611a8b16f..4e482dbb8035fbe 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.groovy @@ -27,7 +27,8 @@ suite("query37") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.groovy index f6eee2e0dd3beed..30c07dc0166d10c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.groovy @@ -27,7 +27,8 @@ suite("query38") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.groovy index 21ffd76819625f4..aac30780bbbc8db 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.groovy @@ -27,7 +27,8 @@ suite("query39") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.groovy index 0e37c7479c6c157..bcf1f1531a3f022 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.groovy @@ -27,7 +27,8 @@ suite("query4") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.groovy index acab07c422121ed..823ba39be0e979c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.groovy @@ -27,7 +27,8 @@ suite("query40") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.groovy index 888c67135f51f6e..7b386b79b2820f1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.groovy @@ -27,7 +27,8 @@ suite("query41") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.groovy index 991f697d71080aa..4eacedbf4f119c5 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.groovy @@ -27,7 +27,8 @@ suite("query42") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.groovy index f5c58e3e37bbbe1..ac9730a433a3073 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.groovy @@ -27,7 +27,8 @@ suite("query43") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.groovy index f814fa09fe71c20..a319e90b172e977 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.groovy @@ -27,7 +27,8 @@ suite("query44") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.groovy index d6130d392433055..7c3de6ddaa6ef4c 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.groovy @@ -27,7 +27,8 @@ suite("query45") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.groovy index 31e080e488cdc1c..c8d27e3424c528d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.groovy @@ -27,7 +27,8 @@ suite("query46") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.groovy index 8b0b8d68cbf0b34..0524a9146f2125f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.groovy @@ -27,7 +27,8 @@ suite("query47") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.groovy index f5537fe68137827..261ff96c2752f97 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.groovy @@ -27,7 +27,8 @@ suite("query48") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.groovy index c0042cb8d868da3..cb42805f0c4974a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.groovy @@ -27,7 +27,8 @@ suite("query49") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.groovy index 85b533ae51c16a5..20975924c7a8af4 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.groovy @@ -27,7 +27,8 @@ suite("query5") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.groovy index f2e6b2b43c64a31..328acb85bf664e4 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.groovy @@ -27,7 +27,8 @@ suite("query50") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.groovy index 89de95ce8f312e5..c5ea4dabde2f54f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.groovy @@ -27,7 +27,8 @@ suite("query51") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.groovy index 7663fa50ee35c19..032415d937dca9f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.groovy @@ -27,7 +27,8 @@ suite("query52") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.groovy index ec8b6b6463b652b..c39591fb9b67ad7 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.groovy @@ -27,7 +27,8 @@ suite("query53") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.groovy index 7e1bc9190460cbd..548c5c9c1b97f07 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.groovy @@ -27,7 +27,8 @@ suite("query54") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.groovy index a5f299661549e42..eb31ff50f28ae21 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.groovy @@ -27,7 +27,8 @@ suite("query55") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.groovy index c1f13f6b57ecba1..8a17401ff6c9205 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.groovy @@ -27,7 +27,8 @@ suite("query56") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.groovy index 93b8a90653a76ab..ae115e9271c92dd 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.groovy @@ -27,7 +27,8 @@ suite("query57") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.groovy index accdbad595c1677..ca816a730a42bb2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.groovy @@ -27,7 +27,8 @@ suite("query58") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.groovy index 979969e5d421433..92874980033a3d8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.groovy @@ -27,7 +27,8 @@ suite("query59") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.groovy index f1509e52588f6b0..acd5c79fcc3c5a9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.groovy @@ -27,7 +27,8 @@ suite("query6") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.groovy index b151f057baf9327..8a8ea8177bdac4a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.groovy @@ -27,7 +27,8 @@ suite("query60") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.groovy index 6cd464656d0d782..8931f91e3ec82c0 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.groovy @@ -27,7 +27,8 @@ suite("query61") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.groovy index 1344bc7997fdcf7..90ba4ad9605a9a0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.groovy @@ -27,7 +27,8 @@ suite("query62") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.groovy index f0e32f0ae875649..c975b53d7d093fb 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.groovy @@ -27,7 +27,8 @@ suite("query63") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.groovy index 89bcd5d85ad1f87..ff08e383f4a7296 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.groovy @@ -27,7 +27,8 @@ suite("query64") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.groovy index 40a604f7f3f94ff..5f4d544248960ef 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.groovy @@ -27,7 +27,8 @@ suite("query65") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.groovy index 450cb99401b8123..3c21e3e2345b460 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.groovy @@ -27,7 +27,8 @@ suite("query66") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.groovy index dd0df5a21a2ed2f..14d78d5910dc636 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.groovy @@ -27,7 +27,8 @@ suite("query67") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.groovy index 510a146a49a81e3..cec709be27197d5 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.groovy @@ -27,7 +27,8 @@ suite("query68") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.groovy index df16047027b0918..35d4b56ed3b7bfb 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.groovy @@ -27,7 +27,8 @@ suite("query69") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.groovy index d6b3b3337b48b6f..a8f7d32c4eaa18b 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.groovy @@ -27,7 +27,8 @@ suite("query7") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.groovy index c6ae958f56abbe6..49d0ea1a7fec15e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.groovy @@ -27,7 +27,8 @@ suite("query70") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.groovy index edab120d0982bcb..dfcda48912cd81b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.groovy @@ -27,7 +27,8 @@ suite("query71") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.groovy index 82956805deceec7..197e97719b88242 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.groovy @@ -27,7 +27,8 @@ suite("query72") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.groovy index 4ac9bdb8af91dfc..3823c6607644fc3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.groovy @@ -27,7 +27,8 @@ suite("query73") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.groovy index b5e0388a97625ec..e88ef8b21bffd0c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.groovy @@ -27,7 +27,8 @@ suite("query74") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.groovy index e53f90b8e702766..3e7648179e0c7f1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.groovy @@ -27,7 +27,8 @@ suite("query75") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.groovy index 6d17c8b8f34d9b5..62293b649dd3833 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.groovy @@ -27,7 +27,8 @@ suite("query76") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.groovy index edca02ef578add9..4fe8740e384f99e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.groovy @@ -27,7 +27,8 @@ suite("query77") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.groovy index a3b3ec0cee1f89c..e680c19c565b92d 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.groovy @@ -27,7 +27,8 @@ suite("query78") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.groovy index 0dcbff1382f76b2..6f775f4f55953f4 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.groovy @@ -27,7 +27,8 @@ suite("query79") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.groovy index 0c61c8c5baaac72..0012c076777183d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.groovy @@ -27,7 +27,8 @@ suite("query8") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.groovy index fe0aef24d74117a..caa2a2a1b2ad765 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.groovy @@ -27,7 +27,8 @@ suite("query80") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.groovy index 9a062a3caf6a025..5c4656e5219f9fe 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.groovy @@ -27,7 +27,8 @@ suite("query81") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.groovy index 77d8d5da9b1df01..398de49bacba267 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.groovy @@ -27,7 +27,8 @@ suite("query82") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.groovy index 9a378a0edd31b9e..5bda799a774ff53 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.groovy @@ -27,7 +27,8 @@ suite("query83") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.groovy index 0b1fc72336bc883..22339b0162ba39e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.groovy @@ -27,7 +27,8 @@ suite("query84") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.groovy index c39e14fbc5d0d47..ad6d27dade51aef 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.groovy @@ -27,7 +27,8 @@ suite("query85") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.groovy index 6eebbf705e347d4..6b79ce082b8405e 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.groovy @@ -27,7 +27,8 @@ suite("query86") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.groovy index af24851e69e7902..a84d7c5ce2d91c9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.groovy @@ -27,7 +27,8 @@ suite("query87") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.groovy index f06be9410345f23..b78cfb405e5753c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.groovy @@ -27,7 +27,8 @@ suite("query88") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.groovy index 44765b6292d2c09..9a6a500f43f4c01 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.groovy @@ -27,7 +27,8 @@ suite("query89") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.groovy index edaf47049a12d5e..96597fd9e2e730e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.groovy @@ -27,7 +27,8 @@ suite("query9") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.groovy index 17422019b67bc9c..802d8e910a6a180 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.groovy @@ -27,7 +27,8 @@ suite("query90") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.groovy index 7113a199e37c94a..29986fb88fad7d3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.groovy @@ -27,7 +27,8 @@ suite("query91") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.groovy index aabe7ee8933374c..f313dc3b56177d3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.groovy @@ -27,7 +27,8 @@ suite("query92") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.groovy index 62206835f391d38..6e40745697a0064 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.groovy @@ -27,7 +27,8 @@ suite("query93") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.groovy index 6232b096dfcb58f..668173c099933d6 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.groovy @@ -27,7 +27,8 @@ suite("query94") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.groovy index f3178bf07806f80..1c21c8264e46063 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.groovy @@ -27,7 +27,8 @@ suite("query95") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.groovy index 47fc1f899c909fa..dd0fb0a2a0c58e6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.groovy @@ -27,7 +27,8 @@ suite("query96") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.groovy index 1da5b8868518165..68488e138469fa9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.groovy @@ -27,7 +27,8 @@ suite("query97") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.groovy index 888386ddf07ba54..a4491c0bbe68182 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.groovy @@ -27,7 +27,8 @@ suite("query98") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.groovy index 7d6452656beb2bd..4f86245024b6bda 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.groovy @@ -27,7 +27,8 @@ suite("query99") { sql 'set enable_runtime_filter_prune=true' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.groovy index 26554a39f81d31e..e681e8970b88c33 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.groovy @@ -27,7 +27,8 @@ suite("query1") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.groovy index 53d0148b50353f1..9b54b0000f29d24 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.groovy @@ -27,7 +27,8 @@ suite("query10") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.groovy index 846c94982d650c0..8fece7871d392ef 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.groovy @@ -27,7 +27,8 @@ suite("query11") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.groovy index a93431498f79e6f..58e1869dce67833 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.groovy @@ -27,7 +27,8 @@ suite("query12") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.groovy index 509b40264e48fac..f3b0e3b70558ed8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.groovy @@ -27,7 +27,8 @@ suite("query13") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.groovy index 97a628d2d1239fc..f67be346e7a1040 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.groovy @@ -27,7 +27,8 @@ suite("query14") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.groovy index 6298af6624711a3..c7e5bece88c1218 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.groovy @@ -27,7 +27,8 @@ suite("query15") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.groovy index bafff2194dc6c22..d7a0d509f2d6d4c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.groovy @@ -27,7 +27,8 @@ suite("query16") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.groovy index dce42b8bfea8322..fddb5222d883fb1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.groovy @@ -27,7 +27,8 @@ suite("query17") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.groovy index 7de1ba01a179503..9f76dc6227cef94 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.groovy @@ -27,7 +27,8 @@ suite("query18") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.groovy index 266206bdd76caf9..1c48e3a0af45ae8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.groovy @@ -27,7 +27,8 @@ suite("query19") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.groovy index 85f4d45552e2b71..85bbd425de2eab1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.groovy @@ -27,7 +27,8 @@ suite("query2") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.groovy index 639ced355d95d29..653a8f4b9333ca9 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.groovy @@ -27,7 +27,8 @@ suite("query20") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.groovy index 2cec3502f83dafe..f970316dc616d1d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.groovy @@ -27,7 +27,8 @@ suite("query21") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.groovy index f741e48935467e5..f2ff38732d1aef7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.groovy @@ -27,7 +27,8 @@ suite("query22") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.groovy index 1c65c846ff5b151..ba36794af0784ce 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.groovy @@ -27,7 +27,8 @@ suite("query23") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.groovy index adab9c674cc8b35..118a3abd214aab1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.groovy @@ -27,7 +27,8 @@ suite("query24") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.groovy index bbc28a7d80a1a06..22b59eab594651e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.groovy @@ -27,7 +27,8 @@ suite("query25") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.groovy index 4bb5d146941ed74..666f666745d56f0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.groovy @@ -27,7 +27,8 @@ suite("query26") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.groovy index 05c6e5153803756..49bbd943cdac12c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.groovy @@ -27,7 +27,8 @@ suite("query27") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.groovy index 8719c1ddb2a5012..5e4e6922302a16f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.groovy @@ -27,7 +27,8 @@ suite("query28") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.groovy index 1971fbd33adedbc..110146adc7c40ef 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.groovy @@ -27,7 +27,8 @@ suite("query29") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.groovy index ac128a2e820f8dd..844214fc1fcaeb7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.groovy @@ -27,7 +27,8 @@ suite("query3") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.groovy index 3b890af7cd62a53..3ab6eab341eadab 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.groovy @@ -27,7 +27,8 @@ suite("query30") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.groovy index c69d01e9c729fba..d5c38c6580bd90a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.groovy @@ -27,7 +27,8 @@ suite("query31") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.groovy index 3227e2ccb943806..7ad1d1850fe11d2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.groovy @@ -27,7 +27,8 @@ suite("query32") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.groovy index 6bf2033571a7618..81646fbb2bb8c43 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.groovy @@ -27,7 +27,8 @@ suite("query33") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.groovy index 65eaa27b7e70bdb..18921e29edb1e8a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.groovy @@ -27,7 +27,8 @@ suite("query34") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.groovy index 7b683915416613f..715d79fb0f3a4c3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.groovy @@ -27,7 +27,8 @@ suite("query35") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.groovy index 9f0ea886317d76b..fb1016f7abd883e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.groovy @@ -27,7 +27,8 @@ suite("query36") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.groovy index cd8a9207ddd384e..ba5b7571ee13220 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.groovy @@ -27,7 +27,8 @@ suite("query37") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.groovy index 2abfd09519910f1..2af0ad48c633eba 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.groovy @@ -27,7 +27,8 @@ suite("query38") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.groovy index 024651a30c42982..192f49246828638 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.groovy @@ -27,7 +27,8 @@ suite("query39") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.groovy index ab34d48243912d1..b3969232aa945d5 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.groovy @@ -27,7 +27,8 @@ suite("query4") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.groovy index 5efdc2f4f589949..c50aa4bc328aa6d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.groovy @@ -27,7 +27,8 @@ suite("query40") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.groovy index 552219d573ffda3..74587534ef15a1d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.groovy @@ -27,7 +27,8 @@ suite("query41") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.groovy index 81a317b15e34ce7..4e89f85bad72a7a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.groovy @@ -27,7 +27,8 @@ suite("query42") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.groovy index 79909c1619b739f..907769e32378f66 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.groovy @@ -27,7 +27,8 @@ suite("query43") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.groovy index 4b0f61a501bc6ea..43f24de882defa3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.groovy @@ -27,7 +27,8 @@ suite("query44") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.groovy index 5b79a3c12059c00..19647b4f2d72482 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.groovy @@ -27,7 +27,8 @@ suite("query45") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.groovy index 6b99ca94406022a..935fdd6f01997f5 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.groovy @@ -27,7 +27,8 @@ suite("query46") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.groovy index 553728ba5aba91f..33778042da412a8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.groovy @@ -27,7 +27,8 @@ suite("query47") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.groovy index c93541ec6f5ec5a..2db035e02976e9d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.groovy @@ -27,7 +27,8 @@ suite("query48") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.groovy index cf95e2be47b13f2..b101e0cbbe29ce1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.groovy @@ -27,7 +27,8 @@ suite("query49") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.groovy index 6e4cf14c7214c3d..2060a9bb54671e4 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.groovy @@ -27,7 +27,8 @@ suite("query5") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.groovy index 638fca17169e81c..7eaf37c57d6aef6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.groovy @@ -27,7 +27,8 @@ suite("query50") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.groovy index c769f23aabbee7b..8da459061cb6197 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.groovy @@ -27,7 +27,8 @@ suite("query51") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.groovy index e5d0953ed8acd0a..5a93ff4826bd8c9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.groovy @@ -27,7 +27,8 @@ suite("query52") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.groovy index e686e69b21b1fbd..fe8a670bb5ee9e6 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.groovy @@ -27,7 +27,8 @@ suite("query53") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.groovy index 572e7014391ea11..919d77122e3c802 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.groovy @@ -27,7 +27,8 @@ suite("query54") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.groovy index c6a8a854ab06d67..324e71cdf8337a3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.groovy @@ -27,7 +27,8 @@ suite("query55") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.groovy index 3eaae3415e27286..fc8c2e042ad3c3b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.groovy @@ -27,7 +27,8 @@ suite("query56") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.groovy index ad499098827274e..356fdbcdf093fff 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.groovy @@ -27,7 +27,8 @@ suite("query57") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.groovy index a6b104862cfe89b..4b27b2005c446a8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.groovy @@ -27,7 +27,8 @@ suite("query58") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.groovy index 50c41ed925a85d3..646ce58330987cd 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.groovy @@ -27,7 +27,8 @@ suite("query59") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.groovy index 088d3fb2ec44f07..e46f5570e0dd013 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.groovy @@ -27,7 +27,8 @@ suite("query6") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.groovy index 18d95002cda0e47..45f1a4e3cb6c732 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.groovy @@ -27,7 +27,8 @@ suite("query60") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.groovy index 70c917d449f226c..233c64c9a56a428 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.groovy @@ -27,7 +27,8 @@ suite("query61") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.groovy index 0d9f5b6a4b1f176..3c39001f40b506d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.groovy @@ -27,7 +27,8 @@ suite("query62") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.groovy index 9d6d3d7a7b7eeda..da2017a12d2a41c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.groovy @@ -27,7 +27,8 @@ suite("query63") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.groovy index 342b1a1ed28301d..dce51ae42136b3e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.groovy @@ -27,7 +27,8 @@ suite("query64") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.groovy index 524325a739a5839..cace677a284d9e1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.groovy @@ -27,7 +27,8 @@ suite("query65") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.groovy index 7659b2ec1ba9a74..0ff32e34d3b78e6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.groovy @@ -27,7 +27,8 @@ suite("query66") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.groovy index 7d55220c9dd941d..2369c972540af4a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.groovy @@ -27,7 +27,8 @@ suite("query67") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.groovy index 24c974ebb84849b..a00dab4d3fe3979 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.groovy @@ -27,7 +27,8 @@ suite("query68") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.groovy index 698aae70a04b8d8..2c4f3412d5045e7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.groovy @@ -27,7 +27,8 @@ suite("query69") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.groovy index ef8733e8dacedf6..32c4200525fb58e 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.groovy @@ -27,7 +27,8 @@ suite("query7") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.groovy index 62c69a3586afab3..acf2122d61cfc09 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.groovy @@ -27,7 +27,8 @@ suite("query70") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.groovy index a73200423717213..82446c56be92984 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.groovy @@ -27,7 +27,8 @@ suite("query71") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.groovy index 0be1a567914d136..90f3e0df19569b6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.groovy @@ -27,7 +27,8 @@ suite("query72") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.groovy index 9dabb2aefaa765b..41685a27e89df79 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.groovy @@ -27,7 +27,8 @@ suite("query73") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.groovy index ca97c586eea3964..921ccabbeee65a4 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.groovy @@ -27,7 +27,8 @@ suite("query74") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.groovy index d01415b6d08d1e7..2dec925800831fb 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.groovy @@ -27,7 +27,8 @@ suite("query75") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.groovy index 421ffcef1ff640a..223e5ef0a06a12c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.groovy @@ -27,7 +27,8 @@ suite("query76") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.groovy index 72a5f874681447f..424ead90e7c1e9b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.groovy @@ -27,7 +27,8 @@ suite("query77") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.groovy index 23ec3579a828b50..7603fd5f19eab71 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.groovy @@ -27,7 +27,8 @@ suite("query78") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.groovy index 2cdd1a008476c9b..3bebd7510c74cbd 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.groovy @@ -27,7 +27,8 @@ suite("query79") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.groovy index 0370cac4fdae5d6..f348de0c3465410 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.groovy @@ -27,7 +27,8 @@ suite("query8") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.groovy index 8364c2739249d06..eab3314110f022e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.groovy @@ -27,7 +27,8 @@ suite("query80") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.groovy index 6852b31a6b6651a..3704d3fc0443b26 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.groovy @@ -27,7 +27,8 @@ suite("query81") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.groovy index 9ead5359b8bbe05..8ad252a81ca95ff 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.groovy @@ -27,7 +27,8 @@ suite("query82") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.groovy index 9ccbacafe1f1282..3db555de00e917f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.groovy @@ -27,7 +27,8 @@ suite("query83") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.groovy index 10fa88d8c678de8..4fce1377b2e5796 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.groovy @@ -27,7 +27,8 @@ suite("query84") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.groovy index d65815456cd6da6..75d69b73fd213ec 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.groovy @@ -27,7 +27,8 @@ suite("query85") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.groovy index eab2c3635feadab..3df80607f919c81 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.groovy @@ -27,7 +27,8 @@ suite("query86") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.groovy index 4331712f3a16ff6..e602706d175bf40 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.groovy @@ -27,7 +27,8 @@ suite("query87") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.groovy index a560ed77f112ca3..b8492f74fb436d1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.groovy @@ -27,7 +27,8 @@ suite("query88") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.groovy index d07b1ecdb2b6ad6..e672fa56aab025c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.groovy @@ -27,7 +27,8 @@ suite("query89") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.groovy index 1a6213df0e83cca..2e22ba1fd0c7d6b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.groovy @@ -27,7 +27,8 @@ suite("query9") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.groovy index eaec05f3b0c8a7a..21805cf06eea021 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.groovy @@ -27,7 +27,8 @@ suite("query90") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.groovy index 7808aecb4bf6c65..273551a0633bc44 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.groovy @@ -27,7 +27,8 @@ suite("query91") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.groovy index 5d8a4eccb155e96..c15c9038ad49999 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.groovy @@ -27,7 +27,8 @@ suite("query92") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.groovy index 1f102c46bd36bad..f36af29ff8f7030 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.groovy @@ -27,7 +27,8 @@ suite("query93") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.groovy index 927d225cb93354f..1a57894dd8fcfd4 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.groovy @@ -27,7 +27,8 @@ suite("query94") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.groovy index 00cb9d60e42c46e..dca29abc663f01a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.groovy @@ -27,7 +27,8 @@ suite("query95") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.groovy index 23df0cd352ca9a6..d4d96281cbea205 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.groovy @@ -27,7 +27,8 @@ suite("query96") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.groovy index 0aca247d0d9bf51..6bd42fcb6ac6899 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.groovy @@ -27,7 +27,8 @@ suite("query97") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.groovy index b80e74bc1d9b7cd..81b70e05713cbc2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.groovy @@ -27,7 +27,8 @@ suite("query98") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set 
runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.groovy index 5e745966f7f01f9..82eb43352f699b2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.groovy @@ -27,7 +27,8 @@ suite("query99") { sql 'set enable_runtime_filter_prune=false' sql 'set parallel_pipeline_task_num=8' sql 'set forbid_unknown_col_stats=false' -sql 'set enable_stats=false' + sql 'set enable_stats=false' + sql "set runtime_filter_type=8" sql 'set broadcast_row_count_limit = 30000000' sql 'set enable_nereids_timeout = false' sql 'SET enable_pipeline_engine = true' diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query1.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query1.groovy index 3fbe8f014134f96..f40fe89879e0746 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query1.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query1.groovy @@ -28,6 +28,7 @@ suite("query1") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with customer_total_return as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query10.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query10.groovy index e06d86f9a0f9929..13f61d443505d16 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query10.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query10.groovy @@ -28,6 +28,7 @@ suite("query10") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query11.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query11.groovy index 5911091adbd8c89..d1a157a9012618f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query11.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query11.groovy @@ -28,6 +28,7 @@ suite("query11") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with year_total as ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query12.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query12.groovy index d923f38c6e7fad6..1e581474bae041a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query12.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query12.groovy @@ -28,6 +28,7 @@ suite("query12") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_item_id diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query13.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query13.groovy index 335e78f9130ac67..1614a789633584d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query13.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query13.groovy @@ -28,6 +28,7 @@ suite("query13") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select avg(ss_quantity) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query14.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query14.groovy index cc86d73025f0026..0c37eaf967c1996 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query14.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query14.groovy @@ -28,6 +28,7 @@ suite("query14") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with cross_items as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query15.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query15.groovy index a5c877ffa7ca32c..1d7dee8d55bd8aa 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query15.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query15.groovy @@ -28,6 +28,7 @@ suite("query15") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select ca_zip diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query16.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query16.groovy index 519358350718784..4a585246ca7cecb 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query16.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query16.groovy @@ -28,6 +28,7 @@ suite("query16") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query17.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query17.groovy index 9677ea7f2e7c07b..2a81db0eb6cbaa4 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query17.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query17.groovy @@ -28,6 +28,7 @@ suite("query17") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_item_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query18.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query18.groovy index 03bde04c2b8e098..ca46be0d3b840d0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query18.groovy +++ 
b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query18.groovy @@ -28,6 +28,7 @@ suite("query18") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_item_id, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query19.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query19.groovy index a96af3a85bf2241..b54f85fbef3e147 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query19.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query19.groovy @@ -28,6 +28,7 @@ suite("query19") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query2.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query2.groovy index 11d726ef8245f90..a85edc048c46504 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query2.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query2.groovy @@ -28,6 +28,7 @@ suite("query2") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with wscs as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query20.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query20.groovy index fe25128298459de..234f636f29c22a7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query20.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query20.groovy @@ -28,6 +28,7 @@ suite("query20") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_item_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query21.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query21.groovy index 7efe0f921efd426..fa7b40fa909a744 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query21.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query21.groovy @@ -28,6 +28,7 @@ suite("query21") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select * diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query22.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query22.groovy index 8604b47f8540e2e..f67fc47b5006039 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query22.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query22.groovy @@ -28,6 +28,7 @@ suite("query22") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set 
enable_runtime_filter_prune=true' def ds = """select i_product_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query23.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query23.groovy index d9a2f937f00ba49..51b70cbc1ff304e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query23.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query23.groovy @@ -28,6 +28,7 @@ suite("query23") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with frequent_ss_items as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query24.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query24.groovy index 52c235516c39b4a..c53be825cf38c0a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query24.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query24.groovy @@ -28,6 +28,7 @@ suite("query24") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ssales as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query25.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query25.groovy index 0f16e5496d622d7..8b8763fd979a934 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query25.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query25.groovy @@ -28,6 +28,7 @@ suite("query25") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query26.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query26.groovy index 9a443e3c7c2cec5..009076d0c8e762f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query26.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query26.groovy @@ -28,6 +28,7 @@ suite("query26") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_item_id, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query27.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query27.groovy index 1550166d9cea57b..871aed717751542 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query27.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query27.groovy @@ -28,6 +28,7 @@ suite("query27") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_item_id, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query28.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query28.groovy index af80817fd14e854..aa9595822dcaf32 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query28.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query28.groovy @@ -28,6 +28,7 @@ suite("query28") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select * diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query29.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query29.groovy index 3645532b513793e..fa8066042755082 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query29.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query29.groovy @@ -28,6 +28,7 @@ suite("query29") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query3.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query3.groovy index e306ec1f4c25cf2..89898a4b201f892 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query3.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query3.groovy @@ -28,6 +28,7 @@ suite("query3") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select dt.d_year diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query30.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query30.groovy index b6274e129e12d57..3dfe9dbae437ce6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query30.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query30.groovy @@ -28,6 +28,7 @@ suite("query30") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with customer_total_return as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query31.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query31.groovy index d3649cb066f8e56..41c86b5c815c334 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query31.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query31.groovy @@ -28,6 +28,7 @@ suite("query31") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ss as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query32.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query32.groovy index 8081abdd26e53fb..81d4bd95ee7dc89 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query32.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query32.groovy @@ -28,6 +28,7 @@ suite("query32") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 
'set enable_runtime_filter_prune=true' def ds = """select sum(cs_ext_discount_amt) as "excess discount amount" diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query33.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query33.groovy index 24111a951232323..de283ae98f5608c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query33.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query33.groovy @@ -28,6 +28,7 @@ suite("query33") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ss as ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query34.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query34.groovy index 6715b6f6bd24bc8..1fb12b37649503d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query34.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query34.groovy @@ -28,6 +28,7 @@ suite("query34") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select c_last_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query35.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query35.groovy index 421b119cb82f268..8b2aa8546327baa 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query35.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query35.groovy @@ -28,6 +28,7 @@ suite("query35") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query36.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query36.groovy index 85c512663a1ce9d..16252b4f280abb9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query36.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query36.groovy @@ -28,6 +28,7 @@ suite("query36") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query37.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query37.groovy index 14cf074643c887c..f104830b584d95d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query37.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query37.groovy @@ -28,6 +28,7 @@ suite("query37") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_item_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query38.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query38.groovy index d24b6c0a5c2ea1a..1dd2ff269eb40d4 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query38.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query38.groovy @@ -28,6 +28,7 @@ suite("query38") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select count(*) from ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query39.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query39.groovy index 02c9981631b8e2d..9b228efe278efc0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query39.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query39.groovy @@ -28,6 +28,7 @@ suite("query39") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with inv as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query4.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query4.groovy index 52cec28b1c0fe61..2ed5d8b43f674a6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query4.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query4.groovy @@ -28,6 +28,7 @@ suite("query4") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with year_total as ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query40.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query40.groovy index cb59410dbbf7e7b..020d0d5022e872a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query40.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query40.groovy @@ -28,6 +28,7 @@ suite("query40") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query41.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query41.groovy index 6e60b949f3f2d46..470bb1cc51cc5c8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query41.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query41.groovy @@ -28,6 +28,7 @@ suite("query41") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select distinct(i_product_name) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query42.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query42.groovy index 973746780eedc32..4f977efb33a793e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query42.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query42.groovy @@ -28,6 +28,7 @@ suite("query42") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set 
runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select dt.d_year diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query43.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query43.groovy index 17df78e13550fca..07ee4b67baa7877 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query43.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query43.groovy @@ -28,6 +28,7 @@ suite("query43") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select s_store_name, s_store_id, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query44.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query44.groovy index 1354a72cce7cda3..ade21517696758d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query44.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query44.groovy @@ -28,6 +28,7 @@ suite("query44") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query45.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query45.groovy index 09c0628f59514de..9211ef194ca9828 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query45.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query45.groovy @@ -28,6 +28,7 @@ suite("query45") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select ca_zip, ca_city, sum(ws_sales_price) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query46.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query46.groovy index 730f6aa8503cf91..fac119c0f5022cd 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query46.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query46.groovy @@ -28,6 +28,7 @@ suite("query46") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select c_last_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query47.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query47.groovy index bf57817220b2b60..f58c6efab5dc0fc 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query47.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query47.groovy @@ -28,6 +28,7 @@ suite("query47") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with v1 as( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query48.groovy 
b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query48.groovy index 4232fa98c81caa0..fc57c87bf82bf7c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query48.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query48.groovy @@ -28,6 +28,7 @@ suite("query48") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select sum (ss_quantity) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query49.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query49.groovy index 88c0bee485ce544..3288f1a0136e0e9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query49.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query49.groovy @@ -28,6 +28,7 @@ suite("query49") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select channel, item, return_ratio, return_rank, currency_rank from diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query5.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query5.groovy index 2f080da648e3306..d8c81ce86fc84d6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query5.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query5.groovy @@ -28,6 +28,7 @@ suite("query5") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ssr as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query50.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query50.groovy index c84f8d11f2f405c..61ae80f7aed8f2b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query50.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query50.groovy @@ -28,6 +28,7 @@ suite("query50") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query51.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query51.groovy index 5952e5f1a78b9e0..054104d829760d3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query51.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query51.groovy @@ -28,6 +28,7 @@ suite("query51") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """WITH web_v1 as ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query52.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query52.groovy index cf69c790fa09353..9111032b40a07a2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query52.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query52.groovy @@ -28,6 +28,7 @@ 
suite("query52") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select dt.d_year diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query53.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query53.groovy index 795a00ddbeb58c7..01ee14f6062e04e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query53.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query53.groovy @@ -28,6 +28,7 @@ suite("query53") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select * from diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query54.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query54.groovy index a180a9a2574ebef..ee3783080b4b3b3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query54.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query54.groovy @@ -28,6 +28,7 @@ suite("query54") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with my_customers as ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query55.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query55.groovy index b38174e118d44e9..53e3e958c7cdebf 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query55.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query55.groovy @@ -28,6 +28,7 @@ suite("query55") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_brand_id brand_id, i_brand brand, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query56.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query56.groovy index dc56d6510f5aa2a..59791b1b0ea3a5c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query56.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query56.groovy @@ -28,6 +28,7 @@ suite("query56") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ss as ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query57.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query57.groovy index 5f24ea904aa7a67..84a9668e9b92c11 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query57.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query57.groovy @@ -28,6 +28,7 @@ suite("query57") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with v1 as( diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query58.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query58.groovy index 9ca8abe1656fbcf..64a960f31a7b56c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query58.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query58.groovy @@ -28,6 +28,7 @@ suite("query58") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ss_items as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query59.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query59.groovy index 82cc9e20f5a10a6..8b2503de35468bb 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query59.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query59.groovy @@ -28,6 +28,7 @@ suite("query59") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with wss as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query6.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query6.groovy index 5705e44e38ffd84..823a1f87a673fd3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query6.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query6.groovy @@ -28,6 +28,7 @@ suite("query6") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select a.ca_state state, count(*) cnt diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query60.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query60.groovy index 93cdee48ee7ba02..32ed3b0778f8f3c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query60.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query60.groovy @@ -28,6 +28,7 @@ suite("query60") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ss as ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query61.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query61.groovy index 6b42a2e47921dc4..545158c91a082b8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query61.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query61.groovy @@ -28,6 +28,7 @@ suite("query61") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query62.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query62.groovy index c2165a11915ec49..44175ccab1c7135 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query62.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query62.groovy @@ -28,6 +28,7 @@ suite("query62") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query63.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query63.groovy index 2939a42b8cba6ed..7e226b7c7ad0ae9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query63.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query63.groovy @@ -28,6 +28,7 @@ suite("query63") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select * diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query64.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query64.groovy index fa2aad18ea75c11..415a363cf6250ce 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query64.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query64.groovy @@ -28,6 +28,7 @@ suite("query64") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with cs_ui as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query65.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query65.groovy index 24771fa55eec105..eb5d2d937bca871 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query65.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query65.groovy @@ -28,6 +28,7 @@ suite("query65") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query66.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query66.groovy index 57fd99eb3ba4e39..346884dcc7011f6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query66.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query66.groovy @@ -28,6 +28,7 @@ suite("query66") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query67.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query67.groovy index 6c5074fe06206d7..1722dc983fef8ea 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query67.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query67.groovy @@ -28,6 +28,7 @@ suite("query67") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set 
enable_runtime_filter_prune=true' def ds = """select * diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query68.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query68.groovy index e8741096ec9f48e..94fbc1079f77bc4 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query68.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query68.groovy @@ -28,6 +28,7 @@ suite("query68") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select c_last_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query69.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query69.groovy index 924233aaef74534..0b0ac47cdc38ae6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query69.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query69.groovy @@ -28,6 +28,7 @@ suite("query69") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query7.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query7.groovy index 72ae3d473703df4..334468aeccdd748 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query7.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query7.groovy @@ -28,6 +28,7 @@ suite("query7") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_item_id, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query70.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query70.groovy index f6859362c84b6b0..31071814aec9164 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query70.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query70.groovy @@ -28,6 +28,7 @@ suite("query70") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query71.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query71.groovy index dddc1784cef1092..3dc8d1edf0bfb03 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query71.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query71.groovy @@ -28,6 +28,7 @@ suite("query71") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_brand_id brand_id, i_brand brand,t_hour,t_minute, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query72.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query72.groovy index 9449ed4c45f89ee..5e81401cb9de76f 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query72.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query72.groovy @@ -28,6 +28,7 @@ suite("query72") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select /*+ SET_VAR(max_join_number_bushy_tree=10, memo_max_group_expression_size=15000)*/ i_item_desc diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query73.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query73.groovy index 08257b735f13353..3dde9af13a3d206 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query73.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query73.groovy @@ -28,6 +28,7 @@ suite("query73") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select c_last_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query74.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query74.groovy index 898009a5ea1ab12..7c4d11d2dc818a8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query74.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query74.groovy @@ -28,6 +28,7 @@ suite("query74") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with year_total as ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query75.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query75.groovy index bf920a1f94091bf..7dbefe6754e8149 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query75.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query75.groovy @@ -28,6 +28,7 @@ suite("query75") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """WITH all_sales AS ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query76.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query76.groovy index 90171df4d58d547..cd8db97bfd251b0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query76.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query76.groovy @@ -28,6 +28,7 @@ suite("query76") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query77.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query77.groovy index c0761a405b5f6e2..8b19b0fc93020ec 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query77.groovy +++ 
b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query77.groovy @@ -28,6 +28,7 @@ suite("query77") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ss as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query78.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query78.groovy index dfef0ac34ded3d6..8afd7fbc9dadc65 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query78.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query78.groovy @@ -28,6 +28,7 @@ suite("query78") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ws as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query79.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query79.groovy index 9a0789f06ed9310..a19d9084e88f969 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query79.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query79.groovy @@ -28,6 +28,7 @@ suite("query79") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query8.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query8.groovy index c73caec01ca3377..612b19616bd2867 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query8.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query8.groovy @@ -28,6 +28,7 @@ suite("query8") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select s_store_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query80.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query80.groovy index 519199d4cc4c84b..9149363080a408b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query80.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query80.groovy @@ -28,6 +28,7 @@ suite("query80") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ssr as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query81.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query81.groovy index 9e10f6b87cf9196..97b340510d9cacc 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query81.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query81.groovy @@ -28,6 +28,7 @@ suite("query81") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with customer_total_return as diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query82.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query82.groovy index 959078416083b3d..1b1fb0b92feaeb3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query82.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query82.groovy @@ -28,6 +28,7 @@ suite("query82") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_item_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query83.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query83.groovy index 9b380969dddc441..61f778f3457336f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query83.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query83.groovy @@ -28,6 +28,7 @@ suite("query83") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with sr_items as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query84.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query84.groovy index 9bf823da7010f18..ff740dd46381734 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query84.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query84.groovy @@ -28,6 +28,7 @@ suite("query84") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select c_customer_id as customer_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query85.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query85.groovy index 5099c00480da977..ae1d13286db784f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query85.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query85.groovy @@ -28,6 +28,7 @@ suite("query85") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select substr(r_reason_desc,1,20) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query86.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query86.groovy index 89399913b583632..d05f0779570ed6b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query86.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query86.groovy @@ -28,6 +28,7 @@ suite("query86") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query87.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query87.groovy index d0c1756f245a70d..ad9ce108fd14d9f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query87.groovy +++ 
b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query87.groovy @@ -28,6 +28,7 @@ suite("query87") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select count(*) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query88.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query88.groovy index 5d8e00cecb702e8..bb9ce8eeda76c0e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query88.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query88.groovy @@ -28,6 +28,7 @@ suite("query88") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select * diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query89.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query89.groovy index f916fb7c0f9a93a..affeb2a91386a59 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query89.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query89.groovy @@ -28,6 +28,7 @@ suite("query89") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select * diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query9.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query9.groovy index eac7285b78b68dd..95bd87ede2a6896 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query9.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query9.groovy @@ -28,6 +28,7 @@ suite("query9") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select case when (select count(*) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query90.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query90.groovy index 2a02253b20d84d4..3c1b6b6417beb4c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query90.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query90.groovy @@ -28,6 +28,7 @@ suite("query90") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query91.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query91.groovy index 9c2f49e4c345814..b82a582d456e09b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query91.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query91.groovy @@ -28,6 +28,7 @@ suite("query91") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set 
enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query92.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query92.groovy index 95a616da7b7dd54..4755802dc94e0ca 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query92.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query92.groovy @@ -28,6 +28,7 @@ suite("query92") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query93.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query93.groovy index 93e041d3d451550..714b9e6a57861bc 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query93.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query93.groovy @@ -28,6 +28,7 @@ suite("query93") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select ss_customer_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query94.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query94.groovy index 5a402b39d249bc6..ef3b086df9871e2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query94.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query94.groovy @@ -28,6 +28,7 @@ suite("query94") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query95.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query95.groovy index 39ec3e4e27d315e..9a856a77eaa3746 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query95.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query95.groovy @@ -28,6 +28,7 @@ suite("query95") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ws_wh as diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query96.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query96.groovy index 5443f9e5328db04..3be405b11eb1e9b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query96.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query96.groovy @@ -28,6 +28,7 @@ suite("query96") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select count(*) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query97.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query97.groovy index d100e29d6c661bf..2dd9da9c061d1d1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query97.groovy +++ 
b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query97.groovy @@ -28,6 +28,7 @@ suite("query97") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """with ssci as ( diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query98.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query98.groovy index 5dadeaad68629de..f479e3c95e16286 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query98.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query98.groovy @@ -28,6 +28,7 @@ suite("query98") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select i_item_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query99.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query99.groovy index 2bb72474c570ae3..0658ad333c9a1a2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query99.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query99.groovy @@ -28,6 +28,7 @@ suite("query99") { sql 'set parallel_pipeline_task_num=8; ' sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' + sql 'set runtime_filter_type=8' sql 'set enable_runtime_filter_prune=true' def ds = """select diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query1.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query1.groovy index fc02bd535df76c7..70bb576396998eb 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query1.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query1.groovy @@ -29,6 +29,7 @@ suite("query1") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with customer_total_return as (select sr_customer_sk as ctr_customer_sk ,sr_store_sk as ctr_store_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query10.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query10.groovy index d3bd5b2020f612c..49c69c254571dec 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query10.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query10.groovy @@ -29,6 +29,7 @@ suite("query10") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select cd_gender, cd_marital_status, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query11.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query11.groovy index 975fff70ff958a9..e0eb0b0d7359453 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query11.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query11.groovy @@ -29,6 +29,7 @@ suite("query11") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with year_total as ( select c_customer_id customer_id ,c_first_name customer_first_name 
diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query12.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query12.groovy index aaa01a0a11173dd..37ebb0b4b2b32b8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query12.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query12.groovy @@ -29,6 +29,7 @@ suite("query12") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id ,i_item_desc ,i_category diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query13.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query13.groovy index af382c410c92ff1..4b93fdd4830be69 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query13.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query13.groovy @@ -29,6 +29,7 @@ suite("query13") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select avg(ss_quantity) ,avg(ss_ext_sales_price) ,avg(ss_ext_wholesale_cost) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query14.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query14.groovy index 2e8654d0d8c44e2..f4659431d115f62 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query14.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query14.groovy @@ -29,6 +29,7 @@ suite("query14") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with cross_items as (select i_item_sk ss_item_sk from item, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query15.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query15.groovy index 0a4311b3df2bbc4..7bebb77d6299e23 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query15.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query15.groovy @@ -29,6 +29,7 @@ suite("query15") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select ca_zip ,sum(cs_sales_price) from catalog_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query16.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query16.groovy index f60cc48a3098f99..6b7501017310061 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query16.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query16.groovy @@ -29,6 +29,7 @@ suite("query16") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select count(distinct cs_order_number) as "order count" ,sum(cs_ext_ship_cost) as "total shipping cost" diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query17.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query17.groovy index e465b16d6e86088..78c1ed42c18d1b4 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query17.groovy +++ 
b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query17.groovy @@ -29,6 +29,7 @@ suite("query17") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id ,i_item_desc ,s_state diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query18.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query18.groovy index 75db6127b07cc3a..d6c553f74863a28 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query18.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query18.groovy @@ -29,6 +29,7 @@ suite("query18") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id, ca_country, ca_state, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query19.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query19.groovy index 0a0cb1fa7d6c527..964511e299a2c15 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query19.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query19.groovy @@ -29,6 +29,7 @@ suite("query19") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, sum(ss_ext_sales_price) ext_price from date_dim, store_sales, item,customer,customer_address,store diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query2.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query2.groovy index 06517d39cb8b847..3e3f2fb77a4576d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query2.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query2.groovy @@ -29,6 +29,7 @@ suite("query2") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with wscs as (select sold_date_sk ,sales_price diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query20.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query20.groovy index 4ab130634938f43..66aa37e9740fd73 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query20.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query20.groovy @@ -29,6 +29,7 @@ suite("query20") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id ,i_item_desc ,i_category diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query21.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query21.groovy index f3aae7a91bfaa25..f4ce2bc117e5864 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query21.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query21.groovy @@ -29,6 +29,7 @@ suite("query21") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select * from(select w_warehouse_name ,i_item_id diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query22.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query22.groovy index 01d5f0bc3cb56ae..ff78abee8f38b04 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query22.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query22.groovy @@ -29,6 +29,7 @@ suite("query22") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_product_name ,i_brand ,i_class diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query23.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query23.groovy index 8b50af22e7c9894..714dddf20e082c8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query23.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query23.groovy @@ -29,6 +29,7 @@ suite("query23") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with frequent_ss_items as (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt from store_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query24.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query24.groovy index 727d9797aa0c5f8..4b7c34bbc37fe54 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query24.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query24.groovy @@ -29,6 +29,7 @@ suite("query24") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ssales as (select c_last_name ,c_first_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query25.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query25.groovy index fe4eeeccc5e85cc..b1dd9f38ec054b2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query25.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query25.groovy @@ -29,6 +29,7 @@ suite("query25") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id ,i_item_desc diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query26.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query26.groovy index 036578f24557935..55bc8e059e37cad 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query26.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query26.groovy @@ -29,6 +29,7 @@ suite("query26") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id, avg(cs_quantity) agg1, avg(cs_list_price) agg2, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query27.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query27.groovy index 10bed124f608934..99ad16343ce645e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query27.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query27.groovy @@ -29,6 +29,7 @@ 
suite("query27") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id, s_state, grouping(s_state) g_state, avg(ss_quantity) agg1, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query28.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query28.groovy index 7ac71bad782ebbc..6b2dce00c3dc503 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query28.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query28.groovy @@ -29,6 +29,7 @@ suite("query28") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select * from (select avg(ss_list_price) B1_LP ,count(ss_list_price) B1_CNT diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query29.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query29.groovy index 0bd4c740a7dc5bc..bc2d82494a7d6af 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query29.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query29.groovy @@ -29,6 +29,7 @@ suite("query29") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id ,i_item_desc diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query3.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query3.groovy index 1542b49a52a7291..af1db41b45abdd7 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query3.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query3.groovy @@ -29,6 +29,7 @@ suite("query3") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select dt.d_year ,item.i_brand_id brand_id ,item.i_brand brand diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query30.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query30.groovy index f4c63d01ba6eda3..b1979697c3b296f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query30.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query30.groovy @@ -29,6 +29,7 @@ suite("query30") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with customer_total_return as (select wr_returning_customer_sk as ctr_customer_sk ,ca_state as ctr_state, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query31.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query31.groovy index e9eca0c98dd06a9..21239e6858ee24a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query31.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query31.groovy @@ -29,6 +29,7 @@ suite("query31") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ss as (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales from store_sales,date_dim,customer_address diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query32.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query32.groovy index aff6ba2abb64243..321719abbceeeda 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query32.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query32.groovy @@ -29,6 +29,7 @@ suite("query32") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select sum(cs_ext_discount_amt) as "excess discount amount" from catalog_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query33.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query33.groovy index 46fb7dd0b2ee949..fc7eeab657bae3e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query33.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query33.groovy @@ -29,6 +29,7 @@ suite("query33") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ss as ( select i_manufact_id,sum(ss_ext_sales_price) total_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query34.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query34.groovy index c74f7305c4a75ee..c49379f789ca5da 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query34.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query34.groovy @@ -29,6 +29,7 @@ suite("query34") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select c_last_name ,c_first_name ,c_salutation diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query35.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query35.groovy index b0ed3d5ece05b00..1b46490f3e29fa4 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query35.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query35.groovy @@ -29,6 +29,7 @@ suite("query35") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select ca_state, cd_gender, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query36.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query36.groovy index 55daeae129d833e..e70fd4be763f7a9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query36.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query36.groovy @@ -29,6 +29,7 @@ suite("query36") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin ,i_category diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query37.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query37.groovy index 07babb9ceeaafcb..877ed1899b9f6d0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query37.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query37.groovy @@ -29,6 +29,7 @@ 
suite("query37") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id ,i_item_desc ,i_current_price diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query38.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query38.groovy index 0827da6727d4d2a..750edf5e40e5a95 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query38.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query38.groovy @@ -29,6 +29,7 @@ suite("query38") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select count(*) from ( select distinct c_last_name, c_first_name, d_date from store_sales, date_dim, customer diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query39.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query39.groovy index f6c57cba9cc95aa..22460290cee2697 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query39.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query39.groovy @@ -29,6 +29,7 @@ suite("query39") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with inv as (select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy ,stdev,mean, case mean when 0 then null else stdev/mean end cov diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query4.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query4.groovy index 5bbb6a982c791f5..4bee9d7d4110287 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query4.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query4.groovy @@ -29,6 +29,7 @@ suite("query4") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with year_total as ( select c_customer_id customer_id ,c_first_name customer_first_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query40.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query40.groovy index 7cc861a9a5c5847..f78340477a564a8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query40.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query40.groovy @@ -29,6 +29,7 @@ suite("query40") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select w_state ,i_item_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query41.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query41.groovy index f91971968001625..3849d90c42456ed 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query41.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query41.groovy @@ -29,6 +29,7 @@ suite("query41") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select distinct(i_product_name) from item i1 where i_manufact_id between 748 and 748+40 diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query42.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query42.groovy index 1480eaf8503b6df..3afebaef308f473 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query42.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query42.groovy @@ -29,6 +29,7 @@ suite("query42") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select dt.d_year ,item.i_category_id ,item.i_category diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query43.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query43.groovy index 7f596e7a471ec86..11f824d2c57c5e8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query43.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query43.groovy @@ -29,6 +29,7 @@ suite("query43") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select s_store_name, s_store_id, sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query44.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query44.groovy index ca7b19e4d0fa1e5..fb524fdab6c5c81 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query44.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query44.groovy @@ -29,6 +29,7 @@ suite("query44") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing from(select * from (select item_sk,rank() over (order by rank_col asc) rnk diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query45.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query45.groovy index e2c985d366559f7..b7a7601a217e2b8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query45.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query45.groovy @@ -29,6 +29,7 @@ suite("query45") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select ca_zip, ca_city, sum(ws_sales_price) from web_sales, customer, customer_address, date_dim, item where ws_bill_customer_sk = c_customer_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query46.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query46.groovy index a4c6cd1bbc4d160..5bda29e31834ce9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query46.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query46.groovy @@ -29,6 +29,7 @@ suite("query46") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select c_last_name ,c_first_name ,ca_city diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query47.groovy 
b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query47.groovy index 3e984c7a307f302..394c9b7bc4d2c51 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query47.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query47.groovy @@ -29,6 +29,7 @@ suite("query47") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with v1 as( select i_category, i_brand, s_store_name, s_company_name, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query48.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query48.groovy index b710a3f46cedd53..8c4a7f9ba27116b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query48.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query48.groovy @@ -29,6 +29,7 @@ suite("query48") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select sum (ss_quantity) from store_sales, store, customer_demographics, customer_address, date_dim where s_store_sk = ss_store_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query49.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query49.groovy index 2a0e9e029efa41f..fed82ce6b6aa490 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query49.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query49.groovy @@ -29,6 +29,7 @@ suite("query49") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select channel, item, return_ratio, return_rank, currency_rank from (select 'web' as channel diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query5.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query5.groovy index 64e0d81353744e4..650b5dd40425c84 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query5.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query5.groovy @@ -29,6 +29,7 @@ suite("query5") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ssr as (select s_store_id, sum(sales_price) as sales, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query50.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query50.groovy index 4f332641a6ddb0a..2fd1c92a364bc0b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query50.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query50.groovy @@ -29,6 +29,7 @@ suite("query50") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select s_store_name ,s_company_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query51.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query51.groovy index 4bc2c956fff2db3..247472728200212 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query51.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query51.groovy @@ -29,6 +29,7 @@ 
suite("query51") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """WITH web_v1 as ( select ws_item_sk item_sk, d_date, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query52.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query52.groovy index e730303177a523a..89918248b202a5b 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query52.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query52.groovy @@ -29,6 +29,7 @@ suite("query52") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select dt.d_year ,item.i_brand_id brand_id ,item.i_brand brand diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query53.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query53.groovy index aa7f55a6f3b034c..fd816572e131132 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query53.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query53.groovy @@ -29,6 +29,7 @@ suite("query53") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select * from (select i_manufact_id, sum(ss_sales_price) sum_sales, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query54.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query54.groovy index 6cb457ef82d8e37..09c6edf68d2b9e5 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query54.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query54.groovy @@ -29,6 +29,7 @@ suite("query54") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with my_customers as ( select distinct c_customer_sk , c_current_addr_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query55.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query55.groovy index 071a3b65fcfb706..692b94a737565f0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query55.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query55.groovy @@ -29,6 +29,7 @@ suite("query55") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_brand_id brand_id, i_brand brand, sum(ss_ext_sales_price) ext_price from date_dim, store_sales, item diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query56.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query56.groovy index 8570ca50f46646d..820c947b39d41d1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query56.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query56.groovy @@ -29,6 +29,7 @@ suite("query56") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ss as ( select i_item_id,sum(ss_ext_sales_price) total_sales from diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query57.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query57.groovy index 04218ef12b6ea66..c5557f980158ed1 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query57.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query57.groovy @@ -29,6 +29,7 @@ suite("query57") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with v1 as( select i_category, i_brand, cc_name, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query58.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query58.groovy index c64496db9d5153b..caf4875d7224a2a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query58.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query58.groovy @@ -29,6 +29,7 @@ suite("query58") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ss_items as (select i_item_id item_id ,sum(ss_ext_sales_price) ss_item_rev diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query59.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query59.groovy index 1ffbc372d762ca7..12a190b16c9bfc2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query59.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query59.groovy @@ -29,6 +29,7 @@ suite("query59") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with wss as (select d_week_seq, ss_store_sk, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query6.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query6.groovy index 6de72a7d728a0a6..f76d38a9604bcae 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query6.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query6.groovy @@ -29,6 +29,7 @@ suite("query6") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select a.ca_state state, count(*) cnt from customer_address a ,customer c diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query60.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query60.groovy index d110e1cc8778b52..936666cd78ca6db 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query60.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query60.groovy @@ -29,6 +29,7 @@ suite("query60") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ss as ( select i_item_id,sum(ss_ext_sales_price) total_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query61.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query61.groovy index 2cc916921091634..266135c4687cec2 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query61.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query61.groovy @@ -29,6 +29,7 @@ 
suite("query61") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 from (select sum(ss_ext_sales_price) promotions diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query62.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query62.groovy index bb7b81612f33c5a..55d0385c1d0f223 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query62.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query62.groovy @@ -29,6 +29,7 @@ suite("query62") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select substr(w_warehouse_name,1,20) ,sm_type diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query63.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query63.groovy index c34d27a1c377fd1..ef36046556e2f44 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query63.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query63.groovy @@ -29,6 +29,7 @@ suite("query63") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select * from (select i_manager_id ,sum(ss_sales_price) sum_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query64.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query64.groovy index 1cca3ba96561042..8b09aec143c5b47 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query64.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query64.groovy @@ -29,6 +29,7 @@ suite("query64") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with cs_ui as (select cs_item_sk ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query65.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query65.groovy index c7ec361c6602930..077b87c3782b2a8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query65.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query65.groovy @@ -29,6 +29,7 @@ suite("query65") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select s_store_name, i_item_desc, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query66.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query66.groovy index e8dc4eb3c644612..e15e2797d3f529f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query66.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query66.groovy @@ -29,6 +29,7 @@ suite("query66") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select w_warehouse_name ,w_warehouse_sq_ft diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query67.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query67.groovy index 57ab62b0f65f914..3dd94b358c64436 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query67.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query67.groovy @@ -29,6 +29,7 @@ suite("query67") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select * from (select i_category ,i_class diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query68.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query68.groovy index 8b05364c02e3881..691044ad1a8636a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query68.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query68.groovy @@ -29,6 +29,7 @@ suite("query68") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select c_last_name ,c_first_name ,ca_city diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query69.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query69.groovy index b41570abb04e611..b90ccf8e7373f4a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query69.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query69.groovy @@ -29,6 +29,7 @@ suite("query69") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select cd_gender, cd_marital_status, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query7.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query7.groovy index 71e20e7ac651bba..d5dc6625fa0f86e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query7.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query7.groovy @@ -29,6 +29,7 @@ suite("query7") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id, avg(ss_quantity) agg1, avg(ss_list_price) agg2, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query70.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query70.groovy index 66f2074a02b663c..ff1cd0307d1892f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query70.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query70.groovy @@ -29,6 +29,7 @@ suite("query70") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select sum(ss_net_profit) as total_sum ,s_state diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query71.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query71.groovy index 89c808984a0fb6b..e70e3eb1e0df4ec 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query71.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query71.groovy @@ -29,6 +29,7 @@ suite("query71") { sql 'set forbid_unknown_col_stats=true' sql 'set 
enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_brand_id brand_id, i_brand brand,t_hour,t_minute, sum(ext_price) ext_price from item, (select ws_ext_sales_price as ext_price, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query72.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query72.groovy index 32a06bb7a1978a4..b2824bfd9f0373a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query72.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query72.groovy @@ -29,6 +29,7 @@ suite("query72") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select /*+ SET_VAR(max_join_number_bushy_tree=10, memo_max_group_expression_size=15000)*/ i_item_desc ,w_warehouse_name ,d1.d_week_seq diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query73.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query73.groovy index bb9e9713220210b..c933dc532548efd 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query73.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query73.groovy @@ -29,6 +29,7 @@ suite("query73") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select c_last_name ,c_first_name ,c_salutation diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query74.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query74.groovy index f50ce2c490633ea..e46a962dae1e6bb 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query74.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query74.groovy @@ -29,6 +29,7 @@ suite("query74") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with year_total as ( select c_customer_id customer_id ,c_first_name customer_first_name diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query75.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query75.groovy index d5888b884f2f2c3..5cbf506aee7e447 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query75.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query75.groovy @@ -29,6 +29,7 @@ suite("query75") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """WITH all_sales AS ( SELECT d_year ,i_brand_id diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query76.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query76.groovy index 159a8f4b66fba9d..4ae7284050e453e 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query76.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query76.groovy @@ -29,6 +29,7 @@ suite("query76") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( 
SELECT 'store' as channel, 'ss_hdemo_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price FROM store_sales, item, date_dim diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query77.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query77.groovy index a20c592be39fabf..fc7ea6724af8392 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query77.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query77.groovy @@ -29,6 +29,7 @@ suite("query77") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ss as (select s_store_sk, sum(ss_ext_sales_price) as sales, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query78.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query78.groovy index 9b22171226758c2..c64573e4d9fd2ad 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query78.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query78.groovy @@ -29,6 +29,7 @@ suite("query78") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ws as (select d_year AS ws_sold_year, ws_item_sk, ws_bill_customer_sk ws_customer_sk, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query79.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query79.groovy index abe3635808da746..259d6e385ab6f06 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query79.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query79.groovy @@ -29,6 +29,7 @@ suite("query79") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit from diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query8.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query8.groovy index 0f407b2543a6a7e..4547c410b571afe 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query8.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query8.groovy @@ -29,6 +29,7 @@ suite("query8") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select s_store_name ,sum(ss_net_profit) from store_sales diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query80.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query80.groovy index 5a71cc054eb0522..db3e02df9d90ac3 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query80.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query80.groovy @@ -29,6 +29,7 @@ suite("query80") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ssr as (select s_store_id as store_id, sum(ss_ext_sales_price) as sales, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query81.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query81.groovy index 
ef9075908799d41..058a5f5a17decd0 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query81.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query81.groovy @@ -29,6 +29,7 @@ suite("query81") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with customer_total_return as (select cr_returning_customer_sk as ctr_customer_sk ,ca_state as ctr_state, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query82.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query82.groovy index b4d625bf2fa3429..63d4df48b05e03a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query82.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query82.groovy @@ -29,6 +29,7 @@ suite("query82") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id ,i_item_desc ,i_current_price diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query83.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query83.groovy index 3fb6dfe5464ecfd..0cdc9cf3b5b7f6f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query83.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query83.groovy @@ -29,6 +29,7 @@ suite("query83") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with sr_items as (select i_item_id item_id, sum(sr_return_quantity) sr_item_qty diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query84.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query84.groovy index e590f54cdbdeec3..baac6a0798a4e4d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query84.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query84.groovy @@ -29,6 +29,7 @@ suite("query84") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select c_customer_id as customer_id , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername from customer diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query85.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query85.groovy index 8702e29057e381f..9894e427ab31481 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query85.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query85.groovy @@ -29,6 +29,7 @@ suite("query85") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select substr(r_reason_desc,1,20) ,avg(ws_quantity) ,avg(wr_refunded_cash) diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query86.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query86.groovy index b017562447820f2..5e379bbb24e0823 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query86.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query86.groovy @@ -29,6 +29,7 @@ suite("query86") { sql 
'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select sum(ws_net_paid) as total_sum ,i_category diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query87.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query87.groovy index 8c545dbdfe8d6e5..93a1f57cbd3b000 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query87.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query87.groovy @@ -29,6 +29,7 @@ suite("query87") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select count(*) from ((select distinct c_last_name, c_first_name, d_date from store_sales, date_dim, customer diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query88.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query88.groovy index 88993dc094bad06..b8eac02e1849f9a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query88.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query88.groovy @@ -29,6 +29,7 @@ suite("query88") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select * from (select count(*) h8_30_to_9 diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query89.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query89.groovy index 28371cd390cd73e..77556d65133181a 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query89.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query89.groovy @@ -29,6 +29,7 @@ suite("query89") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select * from( select i_category, i_class, i_brand, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query9.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query9.groovy index 68e1348fad16d12..c18cc65d20cd21f 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query9.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query9.groovy @@ -29,6 +29,7 @@ suite("query9") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select case when (select count(*) from store_sales where ss_quantity between 1 and 20) > 2972190 diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query90.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query90.groovy index 3af81c2374b1aef..222c25305c487b8 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query90.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query90.groovy @@ -29,6 +29,7 @@ suite("query90") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio from ( select count(*) amc from web_sales, household_demographics , time_dim, web_page diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query91.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query91.groovy index 44961f051f8643d..af6d4f76e46b222 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query91.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query91.groovy @@ -29,6 +29,7 @@ suite("query91") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select cc_call_center_id Call_Center, cc_name Call_Center_Name, diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query92.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query92.groovy index a66ff2a668269dc..3db3bdafdd82920 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query92.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query92.groovy @@ -29,6 +29,7 @@ suite("query92") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select sum(ws_ext_discount_amt) as "Excess Discount Amount" from diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query93.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query93.groovy index 620ef7c9877c270..db971ecbfbbf29c 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query93.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query93.groovy @@ -29,6 +29,7 @@ suite("query93") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select ss_customer_sk ,sum(act_sales) sumsales from (select ss_item_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query94.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query94.groovy index be1536e816f4063..f8df10a781140c9 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query94.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query94.groovy @@ -29,6 +29,7 @@ suite("query94") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select count(distinct ws_order_number) as "order count" ,sum(ws_ext_ship_cost) as "total shipping cost" diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query95.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query95.groovy index 2c2274e602db6fb..b43bb33b48ef197 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query95.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query95.groovy @@ -29,6 +29,7 @@ suite("query95") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ws_wh as (select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 from web_sales ws1,web_sales ws2 diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query96.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query96.groovy index bd12c22b9ac77a8..d9b3719574a7ef2 100644 --- 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query96.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query96.groovy @@ -29,6 +29,7 @@ suite("query96") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select count(*) from store_sales ,household_demographics diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query97.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query97.groovy index 287fff8979e8b11..57317cd1da8ffd6 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query97.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query97.groovy @@ -29,6 +29,7 @@ suite("query97") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """with ssci as ( select ss_customer_sk customer_sk ,ss_item_sk item_sk diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query98.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query98.groovy index c5f52bdef3ff8fc..8bbcb4948e6200d 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query98.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query98.groovy @@ -29,6 +29,7 @@ suite("query98") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select i_item_id ,i_item_desc ,i_category diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query99.groovy b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query99.groovy index 19f1f570d805c44..64069f757894d61 100644 --- a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query99.groovy +++ b/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query99.groovy @@ -29,6 +29,7 @@ suite("query99") { sql 'set forbid_unknown_col_stats=true' sql 'set enable_nereids_timeout = false' sql 'set enable_runtime_filter_prune=false' + sql 'set runtime_filter_type=8' def ds = """select substr(w_warehouse_name,1,20) ,sm_type diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.groovy index eecb9009b281204..7da68f9463ecd87 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.groovy @@ -25,7 +25,8 @@ suite("q1") { sql 'set exec_mem_limit=21G' sql 'SET enable_pipeline_engine = true' sql 'set parallel_pipeline_task_num=8' -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.groovy index 45481787f09fb2c..4e73f443e5efa9d 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.groovy @@ -27,7 +27,8 @@ suite("q10") { sql 'set exec_mem_limit=21G' sql 'SET enable_pipeline_engine = true' sql 'set 
parallel_pipeline_task_num=8' -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.groovy index d3e58acaceab0fe..e87f059055e6bea 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.groovy @@ -26,7 +26,8 @@ suite("q11") { sql 'set parallel_pipeline_task_num=8' sql 'set exec_mem_limit=21G' sql 'SET enable_pipeline_engine = true' -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.groovy index 7c83ed5b00e812a..f3b688dd85153a9 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.groovy @@ -26,7 +26,8 @@ suite("q12") { sql 'set parallel_pipeline_task_num=8' sql 'set exec_mem_limit=21G' sql 'SET enable_pipeline_engine = true' -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.groovy index 305a891b6a697ab..d95fd7021d11b89 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.groovy @@ -30,7 +30,8 @@ suite("q13") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.groovy index c5a856bec7699a2..390fc7133ad08e4 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.groovy @@ -27,7 +27,8 @@ suite("q14") { sql 'set exec_mem_limit=21G' sql 'SET enable_pipeline_engine = true' sql 'set parallel_pipeline_task_num=8' -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.groovy index 5d8046e1754e274..19fa9e0040f9b54 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.groovy @@ -30,7 +30,8 @@ suite("q15") { -sql 
'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.groovy index c51c44f5c89b25a..db46a0df73f0dd7 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.groovy @@ -30,7 +30,8 @@ suite("q16") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.groovy index ecd5e1aea60cff4..c091d6ed0e53010 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.groovy @@ -30,7 +30,8 @@ suite("q17") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.groovy index 298bd134bd637bb..d603cf0957b08bd 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.groovy @@ -30,7 +30,8 @@ suite("q18") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.groovy index fdf6029fbc48c14..0dac2c84121d062 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.groovy @@ -31,7 +31,8 @@ suite("q19") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.groovy index 7faffd04f5651fb..8c096549d6308a5 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.groovy @@ -30,7 +30,8 @@ suite("q2") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.groovy 
b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.groovy index eb2f5b38524634d..38d6bf649f74b81 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.groovy @@ -31,7 +31,8 @@ suite("q20-rewrite") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.groovy index 1b01916442f7c83..1a73c9ae8c90962 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.groovy @@ -31,7 +31,8 @@ suite("q20") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.groovy index 349a0bba8ec6263..2261450c90ecf49 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.groovy @@ -30,7 +30,8 @@ suite("q21") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.groovy index 4a680a13fabebb5..aca4b8485ca8bb8 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.groovy @@ -30,7 +30,8 @@ suite("q22") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.groovy index 68ffe43441972f8..890259f9d44054b 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.groovy @@ -31,7 +31,8 @@ suite("q3") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.groovy index a5820c6a580b995..8107d37cc6928de 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.groovy @@ -25,7 +25,8 @@ suite("q4") { sql 
"set runtime_filter_mode='GLOBAL'" sql 'set exec_mem_limit=21G' sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.groovy index 63266cc181b48ab..433e7ee09e8cf4d 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.groovy @@ -30,7 +30,8 @@ suite("q5") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.groovy index 1c10ea4e92474a1..e39b008b5ac32f9 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.groovy @@ -30,7 +30,8 @@ suite("q6") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.groovy index 2179fbebbbb39b4..064cb5a6173f8cd 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.groovy @@ -30,7 +30,8 @@ suite("q7") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.groovy index 99f7e245f39d9a9..afd09f2aaef6dd8 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.groovy @@ -30,7 +30,8 @@ suite("q8") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.groovy index eca2b0a314aba42..2cd007d65475536 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.groovy @@ -30,7 +30,8 @@ suite("q9") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=false;' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=false' diff --git 
a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q1.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q1.groovy index be8b858ebca4c4e..241e27267ecc21a 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q1.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q1.groovy @@ -25,7 +25,8 @@ suite("q1") { sql 'set exec_mem_limit=21G' sql 'SET enable_pipeline_engine = true' sql 'set parallel_pipeline_task_num=8' -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=true' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=true' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q10.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q10.groovy index 5e004aec397776f..ebd3bdede27eb17 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q10.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q10.groovy @@ -27,7 +27,8 @@ suite("q10") { sql 'set exec_mem_limit=21G' sql 'SET enable_pipeline_engine = true' sql 'set parallel_pipeline_task_num=8' -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=true' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=true' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q11.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q11.groovy index 54a4b8fccd61c31..e9db767a8bf56e7 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q11.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q11.groovy @@ -26,7 +26,8 @@ suite("q11") { sql 'set parallel_pipeline_task_num=8' sql 'set exec_mem_limit=21G' sql 'SET enable_pipeline_engine = true' -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=true' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=true' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q12.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q12.groovy index 796a880917fc0df..9a049f7aea13e90 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q12.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q12.groovy @@ -26,7 +26,8 @@ suite("q12") { sql 'set parallel_pipeline_task_num=8' sql 'set exec_mem_limit=21G' sql 'SET enable_pipeline_engine = true' -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=true' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=true' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q13.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q13.groovy index abbf891428ecbc9..69b98061e7984c1 100644 --- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q13.groovy +++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q13.groovy @@ -30,7 +30,8 @@ suite("q13") { -sql 'set be_number_for_test=3' + sql 'set be_number_for_test=3' + sql "set runtime_filter_type=8" sql 'set forbid_unknown_col_stats=true' sql 'set enable_runtime_filter_prune=true' sql 'set enable_stats=true' diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q14.groovy 
b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q14.groovy
index c9ae56fad1cdb03..33b4e97d4bd594e 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q14.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q14.groovy
@@ -27,7 +27,8 @@ suite("q14") {
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
 sql 'set parallel_pipeline_task_num=8'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q15.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q15.groovy
index 8d348ecdc0ada59..2d06612ef219398 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q15.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q15.groovy
@@ -30,7 +30,8 @@ suite("q15") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q16.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q16.groovy
index 670dfda0000be5e..2ed44283f98d726 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q16.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q16.groovy
@@ -30,7 +30,8 @@ suite("q16") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q17.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q17.groovy
index 177fa6b86d22eba..df9ece5c96f54b8 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q17.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q17.groovy
@@ -30,7 +30,8 @@ suite("q17") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q18.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q18.groovy
index f18c6851a6e087a..91305cc50e489bf 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q18.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q18.groovy
@@ -30,7 +30,8 @@ suite("q18") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q19.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q19.groovy
index e653613ccff086c..61452ddf6dc585c 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q19.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q19.groovy
@@ -31,7 +31,8 @@ suite("q19") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q2.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q2.groovy
index d6f07a94920f347..799af02cf40391b 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q2.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q2.groovy
@@ -30,7 +30,8 @@ suite("q2") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.groovy
index 100cabcf4517931..10396bfbe7ed877 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.groovy
@@ -31,7 +31,8 @@ suite("q20-rewrite") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20.groovy
index 6de71424e5f5dda..222084adb39aa04 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20.groovy
@@ -31,7 +31,8 @@ suite("q20") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q21.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q21.groovy
index b78ce0df69f7457..4792d1089399b5d 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q21.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q21.groovy
@@ -30,7 +30,8 @@ suite("q21") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q22.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q22.groovy
index 1320b4709de63ce..080387f73d0d05f 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q22.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q22.groovy
@@ -30,7 +30,8 @@ suite("q22") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q3.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q3.groovy
index 876654f6c439da9..caaf4862195bec4 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q3.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q3.groovy
@@ -31,7 +31,8 @@ suite("q3") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q4.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q4.groovy
index 365e650434557bf..546534c4f749097 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q4.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q4.groovy
@@ -25,7 +25,8 @@ suite("q4") {
 sql "set runtime_filter_mode='GLOBAL'"
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
- sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q5.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q5.groovy
index 748814e52ac184e..ea5d6b920aef0af 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q5.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q5.groovy
@@ -30,7 +30,8 @@ suite("q5") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q6.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q6.groovy
index d8bc89b8ef69823..748ec651ca3f99d 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q6.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q6.groovy
@@ -30,7 +30,8 @@ suite("q6") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q7.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q7.groovy
index 697be7ff4c6c1f9..938840abbecbc88 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q7.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q7.groovy
@@ -30,7 +30,8 @@ suite("q7") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q8.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q8.groovy
index ef4618273ca092b..0867d4174f9fa0e 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q8.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q8.groovy
@@ -30,7 +30,8 @@ suite("q8") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q9.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q9.groovy
index bdf212430ebcfd1..f536ae3274570ef 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q9.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q9.groovy
@@ -30,7 +30,8 @@ suite("q9") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set forbid_unknown_col_stats=true'
 sql 'set enable_runtime_filter_prune=true'
 sql 'set enable_stats=true'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q1.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q1.groovy
index bb7fc3fac9ec7f0..e52bf3df78bc155 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q1.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q1.groovy
@@ -25,7 +25,8 @@ suite("q1") {
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
 sql 'set parallel_pipeline_task_num=8'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q10.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q10.groovy
index 26724582f24cb1a..78d292de046c46f 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q10.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q10.groovy
@@ -27,7 +27,8 @@ suite("q10") {
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
 sql 'set parallel_pipeline_task_num=8'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
 explain shape plan
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q11.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q11.groovy
index 0eb87114d2464fb..e9e924221115a73 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q11.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q11.groovy
@@ -26,7 +26,8 @@ suite("q11") {
 sql 'set parallel_pipeline_task_num=8'
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q12.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q12.groovy
index 7a16d159a3eda43..8375e1635053264 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q12.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q12.groovy
@@ -26,7 +26,8 @@ suite("q12") {
 sql 'set parallel_pipeline_task_num=8'
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
 explain shape plan
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q13.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q13.groovy
index b3de091a6daaff9..48909dabbd46014 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q13.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q13.groovy
@@ -30,7 +30,8 @@ suite("q13") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
 explain shape plan
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q14.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q14.groovy
index de0af20ee9bf232..7838db82e6b7a90 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q14.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q14.groovy
@@ -27,7 +27,8 @@ suite("q14") {
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
 sql 'set parallel_pipeline_task_num=8'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
 explain shape plan
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q15.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q15.groovy
index 8f763346b72375a..ccb5ce5ae3397b3 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q15.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q15.groovy
@@ -30,7 +30,8 @@ suite("q15") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q16.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q16.groovy
index 6121fbac279984b..5e3f8545b594e9c 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q16.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q16.groovy
@@ -30,7 +30,8 @@ suite("q16") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q17.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q17.groovy
index ed8e34dce0d1e14..f007b11e17e085c 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q17.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q17.groovy
@@ -30,7 +30,8 @@ suite("q17") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q18.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q18.groovy
index 2a6df6c6e2d53d7..7e4ecc150c95d54 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q18.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q18.groovy
@@ -30,7 +30,8 @@ suite("q18") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q19.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q19.groovy
index eeded0e8c91a51e..a25736e7b39b83f 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q19.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q19.groovy
@@ -31,7 +31,8 @@ suite("q19") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q2.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q2.groovy
index d51483b48c63085..f367540d15aa7a1 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q2.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q2.groovy
@@ -30,7 +30,8 @@ suite("q2") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.groovy
index aa90e6adcd150b3..825d2bab5286092 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.groovy
@@ -31,7 +31,8 @@ suite("q20-rewrite") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20.groovy
index 42d49a09472582d..d0b1f3a81caee57 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20.groovy
@@ -31,7 +31,8 @@ suite("q20") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q21.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q21.groovy
index 30202053943a722..4968986c5576e16 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q21.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q21.groovy
@@ -30,7 +30,8 @@ suite("q21") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q22.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q22.groovy
index 0dee2154993c3a3..cccd2f29fafc465 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q22.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q22.groovy
@@ -30,7 +30,8 @@ suite("q22") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q3.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q3.groovy
index c95ea0f994dc75e..3181cd40db01809 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q3.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q3.groovy
@@ -31,7 +31,8 @@ suite("q3") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q4.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q4.groovy
index 0885ba53ac3aa27..4f60a57e131d13b 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q4.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q4.groovy
@@ -25,7 +25,8 @@ suite("q4") {
 sql "set runtime_filter_mode='GLOBAL'"
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
- sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set parallel_pipeline_task_num=8'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q5.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q5.groovy
index ddd13e35c990fa1..8ed3facf7e52b04 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q5.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q5.groovy
@@ -30,7 +30,8 @@ suite("q5") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q6.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q6.groovy
index b417d9f0235fa09..36ad4c1559e3610 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q6.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q6.groovy
@@ -30,7 +30,8 @@ suite("q6") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q7.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q7.groovy
index bf6c7b8f0d186a1..a612ebb5a64701a 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q7.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q7.groovy
@@ -30,7 +30,8 @@ suite("q7") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q8.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q8.groovy
index eb4fe4a120dd76a..597b3b10165c0e8 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q8.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q8.groovy
@@ -30,7 +30,8 @@ suite("q8") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q9.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q9.groovy
index 812e066a6403bdc..d640f7cb9729536 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q9.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q9.groovy
@@ -30,7 +30,8 @@ suite("q9") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 qt_select """
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.groovy
index 6c2d3417bd11ae5..59f149db8ee4607 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.groovy
@@ -25,7 +25,8 @@ suite("q1") {
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
 sql 'set parallel_pipeline_task_num=8'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.groovy
index 619391709630035..9f696db659280f6 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.groovy
@@ -27,7 +27,8 @@ suite("q10") {
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
 sql 'set parallel_pipeline_task_num=8'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.groovy
index cb75fe5373778a7..b72e0b392f758cf 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.groovy
@@ -26,7 +26,8 @@ suite("q11") {
 sql 'set parallel_pipeline_task_num=8'
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.groovy
index 764ff877eae21fa..e4c0f670d56f92f 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.groovy
@@ -26,7 +26,8 @@ suite("q12") {
 sql 'set parallel_pipeline_task_num=8'
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.groovy
index 631a2dbf50dbd8c..991a00d739e65d3 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.groovy
@@ -30,7 +30,8 @@ suite("q13") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.groovy
index c918e68eef27bd3..2b109a4996bae7c 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.groovy
@@ -27,7 +27,8 @@ suite("q14") {
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
 sql 'set parallel_pipeline_task_num=8'
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.groovy
index 50d814689de7bb8..eb627a0a0682937 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.groovy
@@ -30,7 +30,8 @@ suite("q15") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.groovy
index f99471730a7b7cc..5e7a35611c0a32e 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.groovy
@@ -30,7 +30,8 @@ suite("q16") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.groovy
index bb6bd60176b88f5..f365a230ac6cdd3 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.groovy
@@ -30,7 +30,8 @@ suite("q17") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.groovy
index 26d0ad009faf4a8..ffdf6163269155b 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.groovy
@@ -30,7 +30,8 @@ suite("q18") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.groovy
index abd5c8487290665..ec91b1f745baf0f 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.groovy
@@ -31,7 +31,8 @@ suite("q19") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.groovy
index 9abd6f7f4197b1a..8f0d99c46448d8b 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.groovy
@@ -30,7 +30,8 @@ suite("q2") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.groovy
index afb4adbf0906522..77cc83688227a79 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.groovy
@@ -31,7 +31,8 @@ suite("q20-rewrite") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.groovy
index 0a8f3bfb5a2e542..e59a0a70b08f2c6 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.groovy
@@ -31,7 +31,8 @@ suite("q20") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.groovy
index 24600e6c6b5c2ab..74fd3aa486a6f07 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.groovy
@@ -30,7 +30,8 @@ suite("q21") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.groovy
index 3c4e9b6c30334a3..1ec5264ef3091fc 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.groovy
@@ -30,7 +30,8 @@ suite("q22") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.groovy
index 099f74a3315c590..7d5631623839b98 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.groovy
@@ -31,7 +31,8 @@ suite("q3") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.groovy
index 8de4e509e43764d..25da27e3b5efa00 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.groovy
@@ -25,7 +25,8 @@ suite("q4") {
 sql "set runtime_filter_mode='GLOBAL'"
 sql 'set exec_mem_limit=21G'
 sql 'SET enable_pipeline_engine = true'
- sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.groovy
index 0599eb7e978d027..d10c624ff5ce69b 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.groovy
@@ -30,7 +30,8 @@ suite("q5") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.groovy
index 9262cd566bf008e..12abff48e661be5 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.groovy
@@ -30,7 +30,8 @@ suite("q6") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.groovy
index 807d45988494635..544f09cbdf74681 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.groovy
@@ -30,7 +30,8 @@ suite("q7") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.groovy
index 6c19378b67f7356..178ac21e789343c 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.groovy
@@ -30,7 +30,8 @@ suite("q8") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'
diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.groovy b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.groovy
index 7c58ea2609f40a9..a724369a9ce05ae 100644
--- a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.groovy
+++ b/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.groovy
@@ -30,7 +30,8 @@ suite("q9") {
-sql 'set be_number_for_test=3'
+ sql 'set be_number_for_test=3'
+ sql "set runtime_filter_type=8"
 sql 'set enable_runtime_filter_prune=false'
 sql 'set forbid_unknown_col_stats=false;'
 sql 'set enable_runtime_filter_prune=false'