PREHOOK: query: USE default
PREHOOK: type: SWITCHDATABASE
PREHOOK: Input: database:default
POSTHOOK: query: USE default
POSTHOOK: type: SWITCHDATABASE
POSTHOOK: Input: database:default
PREHOOK: query: -- EXCLUDE_OS_WINDOWS
-- excluded on windows because of difference in file name encoding logic

-- SORT_QUERY_RESULTS

create table combine2(key string) partitioned by (value string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@combine2
POSTHOOK: query: -- EXCLUDE_OS_WINDOWS
-- excluded on windows because of difference in file name encoding logic

-- SORT_QUERY_RESULTS

create table combine2(key string) partitioned by (value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@combine2
PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
-- This test sets mapred.max.split.size=256 and hive.merge.smallfiles.avgsize=0
-- in an attempt to force the generation of multiple splits and multiple output files.
-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
-- when using CombineFileInputFormat, so only one split is generated. This has a
-- significant impact on the results of this test.
-- This issue was fixed in MAPREDUCE-2046 which is included in 0.22.

insert overwrite table combine2 partition(value)
select * from (
  select key, value from src where key < 10
  union all
  select key, '|' as value from src where key = 11
  union all
  select key, '2010-04-21 09:45:00' value from src where key = 19) s
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@combine2
POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
-- This test sets mapred.max.split.size=256 and hive.merge.smallfiles.avgsize=0
-- in an attempt to force the generation of multiple splits and multiple output files.
-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
-- when using CombineFileInputFormat, so only one split is generated. This has a
-- significant impact on the results of this test.
-- This issue was fixed in MAPREDUCE-2046 which is included in 0.22.

insert overwrite table combine2 partition(value)
select * from (
  select key, value from src where key < 10
  union all
  select key, '|' as value from src where key = 11
  union all
  select key, '2010-04-21 09:45:00' value from src where key = 19) s
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@combine2@value=2010-04-21 09%3A45%3A00
POSTHOOK: Output: default@combine2@value=val_0
POSTHOOK: Output: default@combine2@value=val_2
POSTHOOK: Output: default@combine2@value=val_4
POSTHOOK: Output: default@combine2@value=val_5
POSTHOOK: Output: default@combine2@value=val_8
POSTHOOK: Output: default@combine2@value=val_9
POSTHOOK: Output: default@combine2@value=|
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=|).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
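The split-size settings named in the comment above are applied in the accompanying combine2.q test script and are not echoed in this captured output. A minimal sketch of that setup, assuming standard qtest conventions (the CombineHiveInputFormat and dynamic-partition lines are assumptions inferred from the behavior being tested, not taken from this output):

  -- assumed test setup; only the first two settings are named in the comment above
  set mapred.max.split.size=256;
  set hive.merge.smallfiles.avgsize=0;
  set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
  set hive.exec.dynamic.partition=true;
  set hive.exec.dynamic.partition.mode=nonstrict;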
PREHOOK: query: show partitions combine2
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@combine2
POSTHOOK: query: show partitions combine2
POSTHOOK: type: SHOWPARTITIONS
POSTHOOK: Input: default@combine2
value=2010-04-21 09%3A45%3A00
value=val_0
value=val_2
value=val_4
value=val_5
value=val_8
value=val_9
value=|
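Note the escaping in the partition names above: a character that is unsafe in an HDFS path, such as the ':' in the timestamp value, is percent-encoded (%3A) when Hive builds the partition directory name. Queries still use the literal, unescaped value; a sketch against the table populated above:

  -- the predicate uses the raw value; only the directory name on disk is escaped
  select key from combine2 where value = '2010-04-21 09:45:00';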
PREHOOK: query: explain select key, value from combine2 where value is not null
PREHOOK: type: QUERY
POSTHOOK: query: explain select key, value from combine2 where value is not null
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: combine2
          Statistics: Num rows: 12 Data size: 14 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: string), value (type: string)
            outputColumnNames: _col0, _col1
            Statistics: Num rows: 12 Data size: 14 Basic stats: COMPLETE Column stats: NONE
            ListSink

PREHOOK: query: select key, value from combine2 where value is not null
PREHOOK: type: QUERY
PREHOOK: Input: default@combine2
PREHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00
PREHOOK: Input: default@combine2@value=val_0
PREHOOK: Input: default@combine2@value=val_2
PREHOOK: Input: default@combine2@value=val_4
PREHOOK: Input: default@combine2@value=val_5
PREHOOK: Input: default@combine2@value=val_8
PREHOOK: Input: default@combine2@value=val_9
PREHOOK: Input: default@combine2@value=|
#### A masked pattern was here ####
POSTHOOK: query: select key, value from combine2 where value is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@combine2
POSTHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00
POSTHOOK: Input: default@combine2@value=val_0
POSTHOOK: Input: default@combine2@value=val_2
POSTHOOK: Input: default@combine2@value=val_4
POSTHOOK: Input: default@combine2@value=val_5
POSTHOOK: Input: default@combine2@value=val_8
POSTHOOK: Input: default@combine2@value=val_9
POSTHOOK: Input: default@combine2@value=|
#### A masked pattern was here ####
0	val_0
0	val_0
0	val_0
11	|
19	2010-04-21 09:45:00
2	val_2
4	val_4
5	val_5
5	val_5
5	val_5
8	val_8
9	val_9
PREHOOK: query: explain extended select count(1) from combine2 where value is not null
PREHOOK: type: QUERY
POSTHOOK: query: explain extended select count(1) from combine2 where value is not null
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
  
TOK_QUERY
   TOK_FROM
      TOK_TABREF
         TOK_TABNAME
            combine2
   TOK_INSERT
      TOK_DESTINATION
         TOK_DIR
            TOK_TMP_FILE
      TOK_SELECT
         TOK_SELEXPR
            TOK_FUNCTION
               count
               1
      TOK_WHERE
         TOK_FUNCTION
            TOK_ISNOTNULL
            TOK_TABLE_OR_COL
               value


STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: combine2
            Statistics: Num rows: 12 Data size: 14 Basic stats: COMPLETE Column stats: NONE
            GatherStats: false
            Select Operator
              Statistics: Num rows: 12 Data size: 14 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(1)
                mode: hash
                outputColumnNames: _col0
                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  sort order: 
                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                  tag: -1
                  value expressions: _col0 (type: bigint)
                  auto parallelism: false
      Path -> Alias:
#### A masked pattern was here ####
      Path -> Partition:
#### A masked pattern was here ####
          Partition
            base file name: value=2010-04-21 09%3A45%3A00
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value 2010-04-21 09:45:00
            properties:
              COLUMN_STATS_ACCURATE true
              bucket_count -1
              columns key
              columns.comments 
              columns.types string
#### A masked pattern was here ####
              name default.combine2
              numFiles 1
              numRows 1
              partition_columns value
              partition_columns.types string
              rawDataSize 2
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 3
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key
                columns.comments 
                columns.types string
#### A masked pattern was here ####
                name default.combine2
                partition_columns value
                partition_columns.types string
                serialization.ddl struct combine2 { string key}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.combine2
            name: default.combine2
#### A masked pattern was here ####
          Partition
            base file name: value=val_0
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_0
            properties:
              COLUMN_STATS_ACCURATE true
              bucket_count -1
              columns key
              columns.comments 
              columns.types string
#### A masked pattern was here ####
              name default.combine2
              numFiles 3
              numRows 3
              partition_columns value
              partition_columns.types string
              rawDataSize 3
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 6
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key
                columns.comments 
                columns.types string
#### A masked pattern was here ####
                name default.combine2
                partition_columns value
                partition_columns.types string
                serialization.ddl struct combine2 { string key}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.combine2
            name: default.combine2
#### A masked pattern was here ####
          Partition
            base file name: value=val_2
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_2
            properties:
              COLUMN_STATS_ACCURATE true
              bucket_count -1
              columns key
              columns.comments 
              columns.types string
#### A masked pattern was here ####
              name default.combine2
              numFiles 1
              numRows 1
              partition_columns value
              partition_columns.types string
              rawDataSize 1
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 2
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key
                columns.comments 
                columns.types string
#### A masked pattern was here ####
                name default.combine2
                partition_columns value
                partition_columns.types string
                serialization.ddl struct combine2 { string key}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.combine2
            name: default.combine2
#### A masked pattern was here ####
          Partition
            base file name: value=val_4
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_4
            properties:
              COLUMN_STATS_ACCURATE true
              bucket_count -1
              columns key
              columns.comments 
              columns.types string
#### A masked pattern was here ####
              name default.combine2
              numFiles 1
              numRows 1
              partition_columns value
              partition_columns.types string
              rawDataSize 1
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 2
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key
                columns.comments 
                columns.types string
#### A masked pattern was here ####
                name default.combine2
                partition_columns value
                partition_columns.types string
                serialization.ddl struct combine2 { string key}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.combine2
            name: default.combine2
#### A masked pattern was here ####
          Partition
            base file name: value=val_5
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_5
            properties:
              COLUMN_STATS_ACCURATE true
              bucket_count -1
              columns key
              columns.comments 
              columns.types string
#### A masked pattern was here ####
              name default.combine2
              numFiles 3
              numRows 3
              partition_columns value
              partition_columns.types string
              rawDataSize 3
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 6
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key
                columns.comments 
                columns.types string
#### A masked pattern was here ####
                name default.combine2
                partition_columns value
                partition_columns.types string
                serialization.ddl struct combine2 { string key}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.combine2
            name: default.combine2
#### A masked pattern was here ####
          Partition
            base file name: value=val_8
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_8
            properties:
              COLUMN_STATS_ACCURATE true
              bucket_count -1
              columns key
              columns.comments 
              columns.types string
#### A masked pattern was here ####
              name default.combine2
              numFiles 1
              numRows 1
              partition_columns value
              partition_columns.types string
              rawDataSize 1
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 2
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key
                columns.comments 
                columns.types string
#### A masked pattern was here ####
                name default.combine2
                partition_columns value
                partition_columns.types string
                serialization.ddl struct combine2 { string key}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.combine2
            name: default.combine2
#### A masked pattern was here ####
          Partition
            base file name: value=val_9
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_9
            properties:
              COLUMN_STATS_ACCURATE true
              bucket_count -1
              columns key
              columns.comments 
              columns.types string
#### A masked pattern was here ####
              name default.combine2
              numFiles 1
              numRows 1
              partition_columns value
              partition_columns.types string
              rawDataSize 1
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 2
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key
                columns.comments 
                columns.types string
#### A masked pattern was here ####
                name default.combine2
                partition_columns value
                partition_columns.types string
                serialization.ddl struct combine2 { string key}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.combine2
            name: default.combine2
#### A masked pattern was here ####
          Partition
            base file name: value=|
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value |
            properties:
              COLUMN_STATS_ACCURATE true
              bucket_count -1
              columns key
              columns.comments 
              columns.types string
#### A masked pattern was here ####
              name default.combine2
              numFiles 1
              numRows 1
              partition_columns value
              partition_columns.types string
              rawDataSize 2
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 3
#### A masked pattern was here ####
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key
                columns.comments 
                columns.types string
#### A masked pattern was here ####
                name default.combine2
                partition_columns value
                partition_columns.types string
                serialization.ddl struct combine2 { string key}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.combine2
            name: default.combine2
      Truncated Path -> Alias:
        /combine2/value=2010-04-21 09%3A45%3A00 [$hdt$_0:combine2]
        /combine2/value=val_0 [$hdt$_0:combine2]
        /combine2/value=val_2 [$hdt$_0:combine2]
        /combine2/value=val_4 [$hdt$_0:combine2]
        /combine2/value=val_5 [$hdt$_0:combine2]
        /combine2/value=val_8 [$hdt$_0:combine2]
        /combine2/value=val_9 [$hdt$_0:combine2]
        /combine2/value=| [$hdt$_0:combine2]
      Needs Tagging: false
      Reduce Operator Tree:
        Group By Operator
          aggregations: count(VALUE._col0)
          mode: mergepartial
          outputColumnNames: _col0
          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
            GlobalTableId: 0
#### A masked pattern was here ####
            NumFilesPerFileSink: 1
            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                properties:
                  columns _col0
                  columns.types bigint
                  escape.delim \
                  hive.serialization.extend.additional.nesting.levels true
                  serialization.format 1
                  serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            TotalFiles: 1
            GatherStats: false
            MultiFileSpray: false

  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        ListSink

PREHOOK: query: select count(1) from combine2 where value is not null
PREHOOK: type: QUERY
PREHOOK: Input: default@combine2
PREHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00
PREHOOK: Input: default@combine2@value=val_0
PREHOOK: Input: default@combine2@value=val_2
PREHOOK: Input: default@combine2@value=val_4
PREHOOK: Input: default@combine2@value=val_5
PREHOOK: Input: default@combine2@value=val_8
PREHOOK: Input: default@combine2@value=val_9
PREHOOK: Input: default@combine2@value=|
#### A masked pattern was here ####
POSTHOOK: query: select count(1) from combine2 where value is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@combine2
POSTHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00
POSTHOOK: Input: default@combine2@value=val_0
POSTHOOK: Input: default@combine2@value=val_2
POSTHOOK: Input: default@combine2@value=val_4
POSTHOOK: Input: default@combine2@value=val_5
POSTHOOK: Input: default@combine2@value=val_8
POSTHOOK: Input: default@combine2@value=val_9
POSTHOOK: Input: default@combine2@value=|
#### A masked pattern was here ####
12
PREHOOK: query: explain select ds, count(1) from srcpart where ds is not null group by ds
PREHOOK: type: QUERY
POSTHOOK: query: explain select ds, count(1) from srcpart where ds is not null group by ds
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: srcpart
            Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: ds (type: string)
              outputColumnNames: _col0
              Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(1)
                keys: _col0 (type: string)
                mode: hash
                outputColumnNames: _col0, _col1
                Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: string)
                  sort order: +
                  Map-reduce partition columns: _col0 (type: string)
                  Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                  value expressions: _col1 (type: bigint)
      Reduce Operator Tree:
        Group By Operator
          aggregations: count(VALUE._col0)
          keys: KEY._col0 (type: string)
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        ListSink

PREHOOK: query: select ds, count(1) from srcpart where ds is not null group by ds
PREHOOK: type: QUERY
PREHOOK: Input: default@srcpart
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
#### A masked pattern was here ####
POSTHOOK: query: select ds, count(1) from srcpart where ds is not null group by ds
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcpart
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
#### A masked pattern was here ####
2008-04-08	1000
2008-04-09	1000
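In the standard qtest srcpart fixture, each ds value covers two hour partitions (hr=11 and hr=12) of 500 rows apiece, which accounts for the two counts of 1000 above. A sketch that would make the breakdown visible, assuming that standard fixture:

  -- expected with the standard fixture: four rows, each with count(1) = 500
  select ds, hr, count(1) from srcpart group by ds, hr;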