-
Notifications
You must be signed in to change notification settings - Fork 4.7k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
5 changed files
with
545 additions
and
9 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
21 changes: 21 additions & 0 deletions
21
ql/src/test/queries/clientpositive/cbo_distribute_sort_cluster_by_pos.q
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,21 @@
create table t1 (a string, b int, c int);

-- distribute by
explain cbo
select * from t1 distribute by 2;
explain
select * from t1 distribute by 2;

-- distribute by and sort by
explain cbo
select * from t1 distribute by 1, b sort by 2;

explain
select * from t1 distribute by 1, b sort by 2;

-- cluster by
explain cbo
select * from t1 cluster by 1, b;

explain
select * from t1 cluster by 1, b;
35 changes: 35 additions & 0 deletions
35
ql/src/test/queries/clientpositive/cbo_order_distribute_sort_cluster_by_cnst.q
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,35 @@
set hive.orderby.position.alias=false;

create table t1 (a string, b int, c int);

-- order by
explain cbo
select * from t1 order by 2, 3;
explain
select * from t1 order by 2, 3;

-- distribute by
explain cbo
select * from t1 distribute by 2;
explain
select * from t1 distribute by 2;

-- distribute by and sort by
explain cbo
select * from t1 distribute by 1, b sort by 2;

explain
select * from t1 distribute by 1, b sort by 2;

-- cluster by
explain cbo
select * from t1 cluster by 1;

explain
select * from t1 cluster by 1;

explain cbo
select * from t1 cluster by 1, b;

explain
select * from t1 cluster by 1, b;
199 changes: 199 additions & 0 deletions
199
ql/src/test/results/clientpositive/llap/cbo_distribute_sort_cluster_by_pos.q.out
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,199 @@
PREHOOK: query: create table t1 (a string, b int, c int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@t1
POSTHOOK: query: create table t1 (a string, b int, c int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t1
PREHOOK: query: explain cbo
select * from t1 distribute by 2
PREHOOK: type: QUERY
PREHOOK: Input: default@t1
#### A masked pattern was here ####
POSTHOOK: query: explain cbo
select * from t1 distribute by 2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1
#### A masked pattern was here ####
CBO PLAN:
HiveProject(a=[$0], b=[$1], c=[$2])
HiveTableScan(table=[[default, t1]], table:alias=[t1])

PREHOOK: query: explain
select * from t1 distribute by 2
PREHOOK: type: QUERY
PREHOOK: Input: default@t1
#### A masked pattern was here ####
POSTHOOK: query: explain
select * from t1 distribute by 2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1
#### A masked pattern was here ####
STAGE DEPENDENCIES:
Stage-0 is a root stage

STAGE PLANS:
Stage: Stage-0
Fetch Operator
limit: -1
Processor Tree:
TableScan
alias: t1
Select Operator
expressions: a (type: string), b (type: int), c (type: int)
outputColumnNames: _col0, _col1, _col2
ListSink

PREHOOK: query: explain cbo
select * from t1 distribute by 1, b sort by 2
PREHOOK: type: QUERY
PREHOOK: Input: default@t1
#### A masked pattern was here ####
POSTHOOK: query: explain cbo
select * from t1 distribute by 1, b sort by 2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1
#### A masked pattern was here ####
CBO PLAN:
HiveSortExchange(distribution=[hash[0, 1]], collation=[[1]])
HiveProject(a=[$0], b=[$1], c=[$2])
HiveTableScan(table=[[default, t1]], table:alias=[t1])

PREHOOK: query: explain
select * from t1 distribute by 1, b sort by 2
PREHOOK: type: QUERY
PREHOOK: Input: default@t1
#### A masked pattern was here ####
POSTHOOK: query: explain
select * from t1 distribute by 1, b sort by 2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1
#### A masked pattern was here ####
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-0 depends on stages: Stage-1

STAGE PLANS:
Stage: Stage-1
Tez
#### A masked pattern was here ####
Edges:
Reducer 2 <- Map 1 (SIMPLE_EDGE)
#### A masked pattern was here ####
Vertices:
Map 1
Map Operator Tree:
TableScan
alias: t1
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: a (type: string), b (type: int), c (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col1 (type: int)
null sort order: z
sort order: +
Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: string), _col2 (type: int)
Execution mode: vectorized, llap
LLAP IO: all inputs
Reducer 2
Execution mode: vectorized, llap
Reduce Operator Tree:
Select Operator
expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: int), VALUE._col1 (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

Stage: Stage-0
Fetch Operator
limit: -1
Processor Tree:
ListSink

PREHOOK: query: explain cbo
select * from t1 cluster by 1, b
PREHOOK: type: QUERY
PREHOOK: Input: default@t1
#### A masked pattern was here ####
POSTHOOK: query: explain cbo
select * from t1 cluster by 1, b
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1
#### A masked pattern was here ####
CBO PLAN:
HiveSortExchange(distribution=[hash[0, 1]], collation=[[0 ASC-nulls-first, 1 ASC-nulls-first]])
HiveProject(a=[$0], b=[$1], c=[$2])
HiveTableScan(table=[[default, t1]], table:alias=[t1])

PREHOOK: query: explain
select * from t1 cluster by 1, b
PREHOOK: type: QUERY
PREHOOK: Input: default@t1
#### A masked pattern was here ####
POSTHOOK: query: explain
select * from t1 cluster by 1, b
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1
#### A masked pattern was here ####
STAGE DEPENDENCIES:
Stage-1 is a root stage
Stage-0 depends on stages: Stage-1

STAGE PLANS:
Stage: Stage-1
Tez
#### A masked pattern was here ####
Edges:
Reducer 2 <- Map 1 (SIMPLE_EDGE)
#### A masked pattern was here ####
Vertices:
Map 1
Map Operator Tree:
TableScan
alias: t1
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: a (type: string), b (type: int), c (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string), _col1 (type: int)
null sort order: aa
sort order: ++
Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: int)
Execution mode: vectorized, llap
LLAP IO: all inputs
Reducer 2
Execution mode: vectorized, llap
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: int), VALUE._col0 (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

Stage: Stage-0
Fetch Operator
limit: -1
Processor Tree:
ListSink

Oops, something went wrong.