This is an automated email from the ASF dual-hosted git repository.
krisztiankasa pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 62ad7d42d6f HIVE-28589: Not null constraint is not enforced on
invalid cast (Krisztian Kasa, reviewed by Denys Kuzmenko)
62ad7d42d6f is described below
commit 62ad7d42d6fd4e10ef5b86545ae41011554fd2f5
Author: Krisztian Kasa <[email protected]>
AuthorDate: Mon Jan 13 06:21:18 2025 +0100
HIVE-28589: Not null constraint is not enforced on invalid cast
(Krisztian Kasa, reviewed by Denys Kuzmenko)
---
.../hadoop/hive/ql/parse/SemanticAnalyzer.java | 11 +-
.../clientnegative/constraint_invalid_cast.q | 7 +
.../constraint_invalid_cast_partition.q | 10 +
.../alter_notnull_constraint_violation.q.out | 26 +-
.../clientnegative/constraint_invalid_cast.q.out | 37 +++
.../constraint_invalid_cast_partition.q.out | 46 ++++
.../clientnegative/insert_into_acid_notnull.q.out | 27 +-
.../insert_into_notnull_constraint.q.out | 26 +-
.../insert_overwrite_notnull_constraint.q.out | 26 +-
.../clientpositive/llap/check_constraint.q.out | 290 +++++++++------------
.../clientpositive/llap/default_constraint.q.out | 36 +--
.../llap/enforce_constraint_notnull.q.out | 160 +++++-------
12 files changed, 412 insertions(+), 290 deletions(-)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 04ddb857c0d..d30e8afaf32 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -7527,9 +7527,6 @@ public class SemanticAnalyzer extends
BaseSemanticAnalyzer {
dpCtx.setRootPath(queryTmpdir);
}
- // Add NOT NULL constraint check
- input = genConstraintsPlan(dest, qb, input);
-
if (!qb.getIsQuery()) {
isAlreadyContainsPartCols = Optional.ofNullable(destinationTable)
.map(Table::getStorageHandler)
@@ -7546,6 +7543,9 @@ public class SemanticAnalyzer extends
BaseSemanticAnalyzer {
}
}
+ // Add NOT NULL constraint check
+ input = genConstraintsPlan(dest, qb, input);
+
if (destinationTable.isMaterializedView() &&
mvRebuildMode ==
MaterializationRebuildMode.INSERT_OVERWRITE_REBUILD) {
// Data organization (DISTRIBUTED, SORTED, CLUSTERED) for materialized
view
@@ -7674,8 +7674,6 @@ public class SemanticAnalyzer extends
BaseSemanticAnalyzer {
+ queryTmpdir + " from " + destinationPath);
}
- // Add NOT NULL constraint check
- input = genConstraintsPlan(dest, qb, input);
if (destinationTable.getStorageHandler() != null &&
destinationTable.getStorageHandler().alwaysUnpartitioned()) {
partSpec = qbm.getPartSpecForAlias(dest);
}
@@ -7695,6 +7693,9 @@ public class SemanticAnalyzer extends
BaseSemanticAnalyzer {
}
}
+ // Add NOT NULL constraint check
+ input = genConstraintsPlan(dest, qb, input);
+
if (destinationTable.isMaterializedView() &&
mvRebuildMode ==
MaterializationRebuildMode.INSERT_OVERWRITE_REBUILD) {
// Data organization (DISTRIBUTED, SORTED, CLUSTERED) for materialized
view
diff --git a/ql/src/test/queries/clientnegative/constraint_invalid_cast.q
b/ql/src/test/queries/clientnegative/constraint_invalid_cast.q
new file mode 100644
index 00000000000..ab216b84372
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/constraint_invalid_cast.q
@@ -0,0 +1,7 @@
+-- cast('2024-99-99' as date) returns null hence not null constraint violation;
+
+create table t1(
+ date_col date not null
+);
+
+insert into t1 values ('2024-99-99');
diff --git
a/ql/src/test/queries/clientnegative/constraint_invalid_cast_partition.q
b/ql/src/test/queries/clientnegative/constraint_invalid_cast_partition.q
new file mode 100644
index 00000000000..a39670991a0
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/constraint_invalid_cast_partition.q
@@ -0,0 +1,10 @@
+-- cast('2024-99-99' as date) returns null hence not null constraint violation;
+
+create table t1 (
+ date_col date not null
+)
+partitioned by (p string);
+
+alter table t1 add partition (p = 'a');
+
+insert into t1 partition(p='a') values ('2024-99-99');
diff --git
a/ql/src/test/results/clientnegative/alter_notnull_constraint_violation.q.out
b/ql/src/test/results/clientnegative/alter_notnull_constraint_violation.q.out
index 2445b5de7f9..9a2bb338b1e 100644
---
a/ql/src/test/results/clientnegative/alter_notnull_constraint_violation.q.out
+++
b/ql/src/test/results/clientnegative/alter_notnull_constraint_violation.q.out
@@ -24,4 +24,28 @@ POSTHOOK: query: alter table t1 change j j int constraint
nn0 not null enforced
POSTHOOK: type: ALTERTABLE_RENAMECOL
POSTHOOK: Input: default@t1
POSTHOOK: Output: default@t1
-FAILED: DataConstraintViolationError
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+PREHOOK: query: insert into t1 values(2,null)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@t1
+Status: Failed
+Vertex failed, vertexName=Map 1, vertexId=vertex_#ID#, diagnostics=[Task
failed, taskId=task_#ID#, diagnostics=[TaskAttempt 0 failed, info=[Error: Error
while running task ( failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due to:OWN_TASK_FAILURE]
+[Masked Vertex killed due to OTHER_VERTEX_FAILURE]
+DAG did not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
+FAILED: Execution Error, return code 2 from
org.apache.hadoop.hive.ql.exec.tez.TezTask. Vertex failed, vertexName=Map 1,
vertexId=vertex_#ID#, diagnostics=[Task failed, taskId=task_#ID#,
diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task (
failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due
to:OWN_TASK_FAILURE][Masked Vertex killed due to OTHER_VERTEX_FAILURE]DAG did
not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
diff --git a/ql/src/test/results/clientnegative/constraint_invalid_cast.q.out
b/ql/src/test/results/clientnegative/constraint_invalid_cast.q.out
new file mode 100644
index 00000000000..662005406c6
--- /dev/null
+++ b/ql/src/test/results/clientnegative/constraint_invalid_cast.q.out
@@ -0,0 +1,37 @@
+PREHOOK: query: create table t1(
+ date_col date not null
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t1
+POSTHOOK: query: create table t1(
+ date_col date not null
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@t1
+PREHOOK: query: insert into t1 values ('2024-99-99')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@t1
+Status: Failed
+Vertex failed, vertexName=Map 1, vertexId=vertex_#ID#, diagnostics=[Task
failed, taskId=task_#ID#, diagnostics=[TaskAttempt 0 failed, info=[Error: Error
while running task ( failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due to:OWN_TASK_FAILURE]
+[Masked Vertex killed due to OTHER_VERTEX_FAILURE]
+DAG did not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
+FAILED: Execution Error, return code 2 from
org.apache.hadoop.hive.ql.exec.tez.TezTask. Vertex failed, vertexName=Map 1,
vertexId=vertex_#ID#, diagnostics=[Task failed, taskId=task_#ID#,
diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task (
failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due
to:OWN_TASK_FAILURE][Masked Vertex killed due to OTHER_VERTEX_FAILURE]DAG did
not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
diff --git
a/ql/src/test/results/clientnegative/constraint_invalid_cast_partition.q.out
b/ql/src/test/results/clientnegative/constraint_invalid_cast_partition.q.out
new file mode 100644
index 00000000000..739061a9b3c
--- /dev/null
+++ b/ql/src/test/results/clientnegative/constraint_invalid_cast_partition.q.out
@@ -0,0 +1,46 @@
+PREHOOK: query: create table t1 (
+ date_col date not null
+)
+partitioned by (p string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t1
+POSTHOOK: query: create table t1 (
+ date_col date not null
+)
+partitioned by (p string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@t1
+PREHOOK: query: alter table t1 add partition (p = 'a')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@t1
+POSTHOOK: query: alter table t1 add partition (p = 'a')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@t1
+POSTHOOK: Output: default@t1@p=a
+PREHOOK: query: insert into t1 partition(p='a') values ('2024-99-99')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@t1@p=a
+Status: Failed
+Vertex failed, vertexName=Map 1, vertexId=vertex_#ID#, diagnostics=[Task
failed, taskId=task_#ID#, diagnostics=[TaskAttempt 0 failed, info=[Error: Error
while running task ( failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due to:OWN_TASK_FAILURE]
+[Masked Vertex killed due to OTHER_VERTEX_FAILURE]
+DAG did not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
+FAILED: Execution Error, return code 2 from
org.apache.hadoop.hive.ql.exec.tez.TezTask. Vertex failed, vertexName=Map 1,
vertexId=vertex_#ID#, diagnostics=[Task failed, taskId=task_#ID#,
diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task (
failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due
to:OWN_TASK_FAILURE][Masked Vertex killed due to OTHER_VERTEX_FAILURE]DAG did
not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
diff --git a/ql/src/test/results/clientnegative/insert_into_acid_notnull.q.out
b/ql/src/test/results/clientnegative/insert_into_acid_notnull.q.out
index 777a0878782..9894796823c 100644
--- a/ql/src/test/results/clientnegative/insert_into_acid_notnull.q.out
+++ b/ql/src/test/results/clientnegative/insert_into_acid_notnull.q.out
@@ -10,4 +10,29 @@ POSTHOOK: query: create table acid_uami(i int,
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@acid_uami
-FAILED: DataConstraintViolationError
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+PREHOOK: query: insert into table acid_uami select 1, null, null
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@acid_uami
+Status: Failed
+Vertex failed, vertexName=Map 1, vertexId=vertex_#ID#, diagnostics=[Task
failed, taskId=task_#ID#, diagnostics=[TaskAttempt 0 failed, info=[Error: Error
while running task ( failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due to:OWN_TASK_FAILURE]
+[Masked Vertex killed due to OTHER_VERTEX_FAILURE]
+[Masked Vertex killed due to OTHER_VERTEX_FAILURE]
+DAG did not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:2
+FAILED: Execution Error, return code 2 from
org.apache.hadoop.hive.ql.exec.tez.TezTask. Vertex failed, vertexName=Map 1,
vertexId=vertex_#ID#, diagnostics=[Task failed, taskId=task_#ID#,
diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task (
failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due
to:OWN_TASK_FAILURE][Masked Vertex killed due to OTHER_VERTEX_FAILURE][Masked
Vertex killed due to OTHER_VERTEX_FAILURE]DAG did not succeed due to
VERTEX_FAILURE. failedVertices:1 killedVertices:2
diff --git
a/ql/src/test/results/clientnegative/insert_into_notnull_constraint.q.out
b/ql/src/test/results/clientnegative/insert_into_notnull_constraint.q.out
index 96feec0d30e..5a56e9974f9 100644
--- a/ql/src/test/results/clientnegative/insert_into_notnull_constraint.q.out
+++ b/ql/src/test/results/clientnegative/insert_into_notnull_constraint.q.out
@@ -6,4 +6,28 @@ POSTHOOK: query: create table nullConstraintCheck(i int NOT
NULL enforced, j int
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@nullConstraintCheck
-FAILED: DataConstraintViolationError
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+PREHOOK: query: insert into nullConstraintCheck values(null,2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@nullconstraintcheck
+Status: Failed
+Vertex failed, vertexName=Map 1, vertexId=vertex_#ID#, diagnostics=[Task
failed, taskId=task_#ID#, diagnostics=[TaskAttempt 0 failed, info=[Error: Error
while running task ( failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due to:OWN_TASK_FAILURE]
+[Masked Vertex killed due to OTHER_VERTEX_FAILURE]
+DAG did not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
+FAILED: Execution Error, return code 2 from
org.apache.hadoop.hive.ql.exec.tez.TezTask. Vertex failed, vertexName=Map 1,
vertexId=vertex_#ID#, diagnostics=[Task failed, taskId=task_#ID#,
diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task (
failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due
to:OWN_TASK_FAILURE][Masked Vertex killed due to OTHER_VERTEX_FAILURE]DAG did
not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
diff --git
a/ql/src/test/results/clientnegative/insert_overwrite_notnull_constraint.q.out
b/ql/src/test/results/clientnegative/insert_overwrite_notnull_constraint.q.out
index 96feec0d30e..6818f8f175b 100644
---
a/ql/src/test/results/clientnegative/insert_overwrite_notnull_constraint.q.out
+++
b/ql/src/test/results/clientnegative/insert_overwrite_notnull_constraint.q.out
@@ -6,4 +6,28 @@ POSTHOOK: query: create table nullConstraintCheck(i int NOT
NULL enforced, j int
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@nullConstraintCheck
-FAILED: DataConstraintViolationError
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+PREHOOK: query: insert overwrite table nullConstraintCheck values(null,2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@nullconstraintcheck
+Status: Failed
+Vertex failed, vertexName=Map 1, vertexId=vertex_#ID#, diagnostics=[Task
failed, taskId=task_#ID#, diagnostics=[TaskAttempt 0 failed, info=[Error: Error
while running task ( failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due to:OWN_TASK_FAILURE]
+[Masked Vertex killed due to OTHER_VERTEX_FAILURE]
+DAG did not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
+FAILED: Execution Error, return code 2 from
org.apache.hadoop.hive.ql.exec.tez.TezTask. Vertex failed, vertexName=Map 1,
vertexId=vertex_#ID#, diagnostics=[Task failed, taskId=task_#ID#,
diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task (
failure ) : java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+, errorMessage=Cannot recover from this error:java.lang.RuntimeException:
org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError: Either
CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+Caused by: org.apache.hadoop.hive.ql.exec.errors.DataConstraintViolationError:
Either CHECK or NOT NULL constraint violated!
+#### A masked pattern was here ####
+]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1
killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due
to:OWN_TASK_FAILURE][Masked Vertex killed due to OTHER_VERTEX_FAILURE]DAG did
not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
diff --git a/ql/src/test/results/clientpositive/llap/check_constraint.q.out
b/ql/src/test/results/clientpositive/llap/check_constraint.q.out
index 14dbcc022d9..43c9a641a21 100644
--- a/ql/src/test/results/clientpositive/llap/check_constraint.q.out
+++ b/ql/src/test/results/clientpositive/llap/check_constraint.q.out
@@ -110,23 +110,19 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 48 Basic stats:
COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: col1 (type: int), col2 (type: int), col3
(type: boolean), col4 (type: int), col5 (type: decimal(3,1)), col6 (type:
decimal(4,1))
+ expressions: col1 (type: int), col2 (type: int), col3
(type: boolean), col4 (type: int), UDFToFloat(col5) (type: float),
UDFToLong(col6) (type: bigint)
outputColumnNames: _col0, _col1, _col2, _col3, _col4,
_col5
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
- predicate: enforce_constraint((((((((- _col0) > -10)
is not false and (_col1 > 10) is not false) and _col2 is not null is not false)
and _col3 BETWEEN _col0 AND _col1 is not false) and ((_col4) IN (23.4) or
(_col4) IN (56) or (_col4) IN (4)) is not false) and ((_col5 > round(567.6))
and (_col5 < round(1000.4))) is not false)) (type: boolean)
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type: int),
_col2 (type: boolean), _col3 (type: int), UDFToFloat(_col4) (type: float),
UDFToLong(_col5) (type: bigint)
- outputColumnNames: _col0, _col1, _col2, _col3,
_col4, _col5
+ predicate: enforce_constraint((((((((- _col0) > -10)
is not false and (_col1 > 10) is not false) and _col2 is not null is not false)
and _col3 BETWEEN _col0 AND _col1 is not false) and (_col4) IN (23.4, 56.0,
4.0) is not false) and ((_col5 > round(567.6)) and (_col5 < round(1000.4))) is
not false)) (type: boolean)
+ Statistics: Num rows: 1 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 1 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 1 Data size: 12 Basic
stats: COMPLETE Column stats: COMPLETE
- value expressions: _col1 (type: int), _col2
(type: boolean), _col3 (type: int), _col4 (type: float), _col5 (type: bigint)
+ value expressions: _col1 (type: int), _col2 (type:
boolean), _col3 (type: int), _col4 (type: float), _col5 (type: bigint)
Execution mode: llap
LLAP IO: no inputs
Reducer 2
@@ -533,24 +529,20 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 48 Basic stats:
COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: col1 (type: string), col2 (type: string),
col3 (type: int), col4 (type: string)
+ expressions: col1 (type: string), col2 (type: string),
col3 (type: int), CAST( col4 AS DATE) (type: date)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 56 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col0 is not null and
(_col2 > 0) is not false)) (type: boolean)
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type:
string), _col2 (type: int), CAST( _col3 AS DATE) (type: date)
- outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 56 Basic stats:
COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
Statistics: Num rows: 1 Data size: 56 Basic stats:
COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 1 Data size: 56 Basic
stats: COMPLETE Column stats: COMPLETE
- table:
- input format:
org.apache.hadoop.mapred.TextInputFormat
- output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde:
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.tmulti
+ table:
+ input format:
org.apache.hadoop.mapred.TextInputFormat
+ output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde:
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.tmulti
Execution mode: llap
LLAP IO: no inputs
@@ -704,24 +696,20 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 48 Basic stats:
COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: col1 (type: string), col2 (type: string),
col3 (type: int), col4 (type: string)
+ expressions: col1 (type: string), col2 (type: string),
col3 (type: int), CAST( col4 AS DATE) (type: date)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 56 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col0 is not null and
(((_col2 > 0) is not false and (_col1 <> null) is not false) and ((_col2 <=
10000) and (_col1 <> '')) is not false))) (type: boolean)
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type:
string), _col2 (type: int), CAST( _col3 AS DATE) (type: date)
- outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 56 Basic stats:
COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
Statistics: Num rows: 1 Data size: 56 Basic stats:
COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 1 Data size: 56 Basic
stats: COMPLETE Column stats: COMPLETE
- table:
- input format:
org.apache.hadoop.mapred.TextInputFormat
- output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde:
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.tmulti
+ table:
+ input format:
org.apache.hadoop.mapred.TextInputFormat
+ output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde:
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.tmulti
Execution mode: llap
LLAP IO: no inputs
@@ -855,24 +843,20 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 48 Basic stats:
COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: col1 (type: string), col2 (type: string),
col3 (type: string), col4 (type: int)
+ expressions: col1 (type: string), col2 (type: string),
CAST( col3 AS DATE) (type: date), col4 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 56 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col0 is not null and
(_col3 > 0) is not false)) (type: boolean)
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type:
string), CAST( _col2 AS DATE) (type: date), _col3 (type: int)
- outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 56 Basic stats:
COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
Statistics: Num rows: 1 Data size: 56 Basic stats:
COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 1 Data size: 56 Basic
stats: COMPLETE Column stats: COMPLETE
- table:
- input format:
org.apache.hadoop.mapred.TextInputFormat
- output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde:
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.tcase
+ table:
+ input format:
org.apache.hadoop.mapred.TextInputFormat
+ output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde:
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.tcase
Execution mode: llap
LLAP IO: no inputs
@@ -1089,24 +1073,20 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 48 Basic stats:
COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: col1 (type: string), null (type: int),
col2 (type: decimal(1,1))
+ expressions: col1 (type: string), null (type: int),
UDFToFloat(col2) (type: float)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 1 Data size: 4 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
- predicate: enforce_constraint((_col0 is not null and
((UDFToFloat(null) * _col2) > 10) is not false)) (type: boolean)
- Statistics: Num rows: 1 Data size: 4 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: string), _col1 (type:
int), UDFToFloat(_col2) (type: float)
- outputColumnNames: _col0, _col1, _col2
+ predicate: enforce_constraint((_col0 is not null and
((UDFToFloat(_col1) * _col2) > 10) is not false)) (type: boolean)
+ Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 1 Data size: 8 Basic
stats: COMPLETE Column stats: COMPLETE
- table:
- input format:
org.apache.hadoop.mapred.TextInputFormat
- output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde:
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.tcast
+ table:
+ input format:
org.apache.hadoop.mapred.TextInputFormat
+ output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde:
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.tcast
Execution mode: llap
LLAP IO: no inputs
@@ -1248,24 +1228,20 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 48 Basic stats:
COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: col1 (type: int), col2 (type:
decimal(2,1)), col3 (type: decimal(2,1)), col4 (type: boolean)
+ expressions: col1 (type: int), UDFToFloat(col2) (type:
float), CAST( col3 AS decimal(4,1)) (type: decimal(4,1)), col4 (type: boolean)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 116 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col1 is not null and
((UDFToFloat(_col2) + _col1) < (UDFToFloat(_col0) + (_col0 * _col0))) is not
false)) (type: boolean)
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), UDFToFloat(_col1)
(type: float), CAST( _col2 AS decimal(4,1)) (type: decimal(4,1)), _col3 (type:
boolean)
- outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 1 Data size: 116 Basic stats:
COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
Statistics: Num rows: 1 Data size: 116 Basic
stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 1 Data size: 116 Basic
stats: COMPLETE Column stats: COMPLETE
- table:
- input format:
org.apache.hadoop.mapred.TextInputFormat
- output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde:
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.texpr
+ table:
+ input format:
org.apache.hadoop.mapred.TextInputFormat
+ output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde:
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.texpr
Execution mode: llap
LLAP IO: no inputs
@@ -1399,23 +1375,19 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 500 Data size: 89000 Basic stats:
COMPLETE Column stats: COMPLETE
Select Operator
- expressions: UDFToInteger(key) (type: int), CAST( key AS
decimal(5,2)) (type: decimal(5,2)), value (type: string)
+ expressions: UDFToInteger(key) (type: int), CAST( key AS
decimal(5,2)) (type: decimal(5,2)), CAST( value AS varchar(128)) (type:
varchar(128))
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 500 Data size: 103500 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 164000 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col1 is not null and
(_col1 >= CAST( _col0 AS decimal(5,2))) is not false)) (type: boolean)
- Statistics: Num rows: 250 Data size: 51750 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 250 Data size: 82000 Basic stats:
COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 250 Data size: 82000 Basic
stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 250 Data size: 82000 Basic
stats: COMPLETE Column stats: COMPLETE
- value expressions: _col1 (type: decimal(5,2)), _col2
(type: varchar(128))
+ value expressions: _col1 (type: decimal(5,2)), _col2
(type: varchar(128))
Execution mode: vectorized, llap
LLAP IO: all inputs
Reducer 2
@@ -1517,12 +1489,12 @@ STAGE PLANS:
Limit
Number of rows: 10
Statistics: Num rows: 10 Data size: 2070 Basic stats:
COMPLETE Column stats: COMPLETE
- Filter Operator
- predicate: enforce_constraint((_col1 is not null and
(_col1 >= CAST( _col0 AS decimal(5,2))) is not false)) (type: boolean)
- Statistics: Num rows: 5 Data size: 1035 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 10 Data size: 3280 Basic stats:
COMPLETE Column stats: COMPLETE
+ Filter Operator
+ predicate: enforce_constraint((_col1 is not null and
(_col1 >= CAST( _col0 AS decimal(5,2))) is not false)) (type: boolean)
Statistics: Num rows: 5 Data size: 1640 Basic stats:
COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: int)
@@ -1678,23 +1650,19 @@ STAGE PLANS:
Number of rows: 10
Statistics: Num rows: 10 Data size: 1734 Basic stats: COMPLETE
Column stats: COMPLETE
Select Operator
- expressions: VALUE._col0 (type: int), VALUE._col1 (type:
decimal(5,2)), VALUE._col2 (type: string)
+ expressions: VALUE._col0 (type: int), VALUE._col1 (type:
decimal(5,2)), CAST( VALUE._col2 AS varchar(128)) (type: varchar(128))
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 10 Data size: 1734 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col1 is not null and
(_col1 >= CAST( _col0 AS decimal(5,2))) is not false)) (type: boolean)
- Statistics: Num rows: 5 Data size: 923 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 5 Data size: 1528 Basic stats:
COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 5 Data size: 1528 Basic stats:
COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 5 Data size: 1528 Basic stats:
COMPLETE Column stats: COMPLETE
- value expressions: _col1 (type: decimal(5,2)), _col2
(type: varchar(128))
+ value expressions: _col1 (type: decimal(5,2)), _col2
(type: varchar(128))
Reducer 4
Execution mode: vectorized, llap
Reduce Operator Tree:
@@ -1858,12 +1826,12 @@ STAGE PLANS:
Limit
Number of rows: 10
Statistics: Num rows: 10 Data size: 2070 Basic stats:
COMPLETE Column stats: COMPLETE
- Filter Operator
- predicate: enforce_constraint((_col1 is not null and
(_col1 >= CAST( _col0 AS decimal(5,2))) is not false)) (type: boolean)
- Statistics: Num rows: 5 Data size: 1035 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 10 Data size: 3280 Basic stats:
COMPLETE Column stats: COMPLETE
+ Filter Operator
+ predicate: enforce_constraint((_col1 is not null and
(_col1 >= CAST( _col0 AS decimal(5,2))) is not false)) (type: boolean)
Statistics: Num rows: 5 Data size: 1640 Basic stats:
COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: int)
@@ -2305,23 +2273,19 @@ STAGE PLANS:
Map-reduce partition columns: UDFToInteger(_col0)
(type: int)
Statistics: Num rows: 1 Data size: 116 Basic stats:
COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: int), 893.14 (type:
decimal(5,2)), 'apache_hive' (type: string)
+ expressions: _col1 (type: int), 893.14 (type:
decimal(5,2)), 'apache_hive' (type: varchar(128))
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 116 Basic stats:
COMPLETE Column stats: NONE
Filter Operator
predicate: enforce_constraint(_col1 is not null)
(type: boolean)
Statistics: Num rows: 1 Data size: 116 Basic stats:
COMPLETE Column stats: NONE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 1 Data size: 116 Basic
stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 1 Data size: 116 Basic
stats: COMPLETE Column stats: NONE
- value expressions: _col1 (type: decimal(5,2)),
_col2 (type: varchar(128))
+ value expressions: _col1 (type: decimal(5,2)),
_col2 (type: varchar(128))
Execution mode: vectorized, llap
LLAP IO: may be used (ACID table)
Reducer 2
@@ -3613,23 +3577,19 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 48 Basic stats:
COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: col1 (type: int), null (type: bigint)
+ expressions: UDFToByte(col1) (type: tinyint), null
(type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
- predicate: enforce_constraint((null) IN (4, 5) is
not false) (type: boolean)
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: UDFToByte(_col0) (type: tinyint),
_col1 (type: bigint)
- outputColumnNames: _col0, _col1
+ predicate: enforce_constraint((_col1) IN (4L, 5L) is
not false) (type: boolean)
+ Statistics: Num rows: 1 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col1 (type: bigint)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col1 (type: bigint)
Statistics: Num rows: 1 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col1 (type: bigint)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col1 (type:
bigint)
- Statistics: Num rows: 1 Data size: 12 Basic
stats: COMPLETE Column stats: COMPLETE
- value expressions: _col0 (type: tinyint)
+ value expressions: _col0 (type: tinyint)
Execution mode: llap
LLAP IO: no inputs
Reducer 2
@@ -3698,23 +3658,19 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 48 Basic stats:
COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: null (type: tinyint), col1 (type: int)
+ expressions: null (type: tinyint), UDFToLong(col1)
(type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 4 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
- predicate: enforce_constraint((_col1) IN (4, 5) is
not false) (type: boolean)
- Statistics: Num rows: 1 Data size: 4 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: tinyint),
UDFToLong(_col1) (type: bigint)
- outputColumnNames: _col0, _col1
+ predicate: enforce_constraint((_col1) IN (4L, 5L) is
not false) (type: boolean)
+ Statistics: Num rows: 1 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col1 (type: bigint)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col1 (type: bigint)
Statistics: Num rows: 1 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col1 (type: bigint)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col1 (type:
bigint)
- Statistics: Num rows: 1 Data size: 12 Basic
stats: COMPLETE Column stats: COMPLETE
- value expressions: _col0 (type: tinyint)
+ value expressions: _col0 (type: tinyint)
Execution mode: llap
LLAP IO: no inputs
Reducer 2
@@ -3841,12 +3797,12 @@ STAGE PLANS:
Limit
Number of rows: 3
Statistics: Num rows: 3 Data size: 261 Basic stats: COMPLETE
Column stats: COMPLETE
- Filter Operator
- predicate: enforce_constraint(((_col0 > 0) and (_col0 <
5000)) is not false) (type: boolean)
- Statistics: Num rows: 1 Data size: 87 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: UDFToInteger(_col0) (type: int)
- outputColumnNames: _col0
+ Select Operator
+ expressions: UDFToInteger(_col0) (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 3 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
+ Filter Operator
+ predicate: enforce_constraint(((_col0 > 0) and (_col0 <
5000)) is not false) (type: boolean)
Statistics: Num rows: 1 Data size: 4 Basic stats:
COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
diff --git a/ql/src/test/results/clientpositive/llap/default_constraint.q.out
b/ql/src/test/results/clientpositive/llap/default_constraint.q.out
index 01139354e49..a9e78172112 100644
--- a/ql/src/test/results/clientpositive/llap/default_constraint.q.out
+++ b/ql/src/test/results/clientpositive/llap/default_constraint.q.out
@@ -1902,23 +1902,19 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: ###Masked### Basic
stats: COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: 127Y (type: tinyint), 32767S (type:
smallint), 2147483647 (type: int), 9223372036854775807L (type: bigint), 3.4E38D
(type: double), col1 (type: decimal(5,2))
+ expressions: 127Y (type: tinyint), 32767S (type:
smallint), 2147483647 (type: int), 9223372036854775807L (type: bigint), 3.4E38D
(type: double), CAST( col1 AS decimal(9,2)) (type: decimal(9,2))
outputColumnNames: _col0, _col1, _col2, _col3, _col4,
_col5
Statistics: Num rows: 1 Data size: ###Masked### Basic
stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint(_col0 is not null)
(type: boolean)
Statistics: Num rows: 1 Data size: ###Masked###
Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: tinyint), _col1 (type:
smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: double), CAST(
_col5 AS decimal(9,2)) (type: decimal(9,2))
- outputColumnNames: _col0, _col1, _col2, _col3,
_col4, _col5
+ Reduce Output Operator
+ key expressions: _col1 (type: smallint)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col1 (type:
smallint)
Statistics: Num rows: 1 Data size: ###Masked###
Basic stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col1 (type: smallint)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col1 (type:
smallint)
- Statistics: Num rows: 1 Data size: ###Masked###
Basic stats: COMPLETE Column stats: COMPLETE
- value expressions: _col0 (type: tinyint), _col2
(type: int), _col3 (type: bigint), _col4 (type: double), _col5 (type:
decimal(9,2))
+ value expressions: _col0 (type: tinyint), _col2
(type: int), _col3 (type: bigint), _col4 (type: double), _col5 (type:
decimal(9,2))
Execution mode: llap
LLAP IO: no inputs
Reducer 2
@@ -2246,23 +2242,19 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: ###Masked### Basic
stats: COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: 108Y (type: tinyint), 32767S (type:
smallint), 2147483647 (type: int), 9223372036854775807L (type: bigint), 3.4E38D
(type: double), col1 (type: decimal(5,2))
+ expressions: 108Y (type: tinyint), 32767S (type:
smallint), 2147483647 (type: int), 9223372036854775807L (type: bigint), 3.4E38D
(type: double), CAST( col1 AS decimal(9,2)) (type: decimal(9,2))
outputColumnNames: _col0, _col1, _col2, _col3, _col4,
_col5
Statistics: Num rows: 1 Data size: ###Masked### Basic
stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint(_col0 is not null)
(type: boolean)
Statistics: Num rows: 1 Data size: ###Masked###
Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: tinyint), _col1 (type:
smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: double), CAST(
_col5 AS decimal(9,2)) (type: decimal(9,2))
- outputColumnNames: _col0, _col1, _col2, _col3,
_col4, _col5
+ Reduce Output Operator
+ key expressions: _col1 (type: smallint)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col1 (type:
smallint)
Statistics: Num rows: 1 Data size: ###Masked###
Basic stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col1 (type: smallint)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col1 (type:
smallint)
- Statistics: Num rows: 1 Data size: ###Masked###
Basic stats: COMPLETE Column stats: COMPLETE
- value expressions: _col0 (type: tinyint), _col2
(type: int), _col3 (type: bigint), _col4 (type: double), _col5 (type:
decimal(9,2))
+ value expressions: _col0 (type: tinyint), _col2
(type: int), _col3 (type: bigint), _col4 (type: double), _col5 (type:
decimal(9,2))
Execution mode: llap
LLAP IO: no inputs
Reducer 2
diff --git
a/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out
b/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out
index 5100238174e..2d9d4f6070a 100644
--- a/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out
+++ b/ql/src/test/results/clientpositive/llap/enforce_constraint_notnull.q.out
@@ -2583,23 +2583,19 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 56 Basic stats:
COMPLETE Column stats: COMPLETE
function name: inline
Select Operator
- expressions: col1 (type: int), col2 (type:
decimal(5,2)), col3 (type: string)
+ expressions: col1 (type: int), col2 (type:
decimal(5,2)), CAST( col3 AS varchar(128)) (type: varchar(128))
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 212 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col1 is not null and
_col2 is not null)) (type: boolean)
- Statistics: Num rows: 1 Data size: 8 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 1 Data size: 212 Basic stats:
COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 1 Data size: 212 Basic
stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 1 Data size: 212 Basic
stats: COMPLETE Column stats: COMPLETE
- value expressions: _col1 (type: decimal(5,2)),
_col2 (type: varchar(128))
+ value expressions: _col1 (type: decimal(5,2)),
_col2 (type: varchar(128))
Execution mode: llap
LLAP IO: no inputs
Reducer 2
@@ -2731,23 +2727,19 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 500 Data size: 89000 Basic stats:
COMPLETE Column stats: COMPLETE
Select Operator
- expressions: UDFToInteger(key) (type: int), CAST( key AS
decimal(5,2)) (type: decimal(5,2)), value (type: string)
+ expressions: UDFToInteger(key) (type: int), CAST( key AS
decimal(5,2)) (type: decimal(5,2)), CAST( value AS varchar(128)) (type:
varchar(128))
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 500 Data size: 103500 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 164000 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col1 is not null and
_col2 is not null)) (type: boolean)
- Statistics: Num rows: 250 Data size: 51750 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 250 Data size: 82000 Basic stats:
COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 250 Data size: 82000 Basic
stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 250 Data size: 82000 Basic
stats: COMPLETE Column stats: COMPLETE
- value expressions: _col1 (type: decimal(5,2)), _col2
(type: varchar(128))
+ value expressions: _col1 (type: decimal(5,2)), _col2
(type: varchar(128))
Execution mode: vectorized, llap
LLAP IO: all inputs
Reducer 2
@@ -2885,23 +2877,19 @@ STAGE PLANS:
Number of rows: 2
Statistics: Num rows: 2 Data size: 414 Basic stats: COMPLETE
Column stats: COMPLETE
Select Operator
- expressions: VALUE._col0 (type: int), VALUE._col1 (type:
decimal(5,2)), VALUE._col2 (type: string)
+ expressions: VALUE._col0 (type: int), VALUE._col1 (type:
decimal(5,2)), CAST( VALUE._col2 AS varchar(128)) (type: varchar(128))
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 2 Data size: 414 Basic stats: COMPLETE
Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col1 is not null and _col2
is not null)) (type: boolean)
- Statistics: Num rows: 1 Data size: 207 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 1 Data size: 328 Basic stats:
COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 1 Data size: 328 Basic stats:
COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 1 Data size: 328 Basic stats:
COMPLETE Column stats: COMPLETE
- value expressions: _col1 (type: decimal(5,2)), _col2
(type: varchar(128))
+ value expressions: _col1 (type: decimal(5,2)), _col2
(type: varchar(128))
Reducer 3
Execution mode: vectorized, llap
Reduce Operator Tree:
@@ -3034,12 +3022,12 @@ STAGE PLANS:
Limit
Number of rows: 2
Statistics: Num rows: 2 Data size: 414 Basic stats: COMPLETE
Column stats: COMPLETE
- Filter Operator
- predicate: enforce_constraint((_col1 is not null and _col2
is not null)) (type: boolean)
- Statistics: Num rows: 1 Data size: 207 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 2 Data size: 656 Basic stats:
COMPLETE Column stats: COMPLETE
+ Filter Operator
+ predicate: enforce_constraint((_col1 is not null and
_col2 is not null)) (type: boolean)
Statistics: Num rows: 1 Data size: 328 Basic stats:
COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: int)
@@ -3205,12 +3193,12 @@ STAGE PLANS:
Limit
Number of rows: 2
Statistics: Num rows: 2 Data size: 414 Basic stats: COMPLETE
Column stats: COMPLETE
- Filter Operator
- predicate: enforce_constraint((_col1 is not null and _col2
is not null)) (type: boolean)
- Statistics: Num rows: 1 Data size: 207 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 2 Data size: 656 Basic stats:
COMPLETE Column stats: COMPLETE
+ Filter Operator
+ predicate: enforce_constraint((_col1 is not null and
_col2 is not null)) (type: boolean)
Statistics: Num rows: 1 Data size: 328 Basic stats:
COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: int)
@@ -3323,23 +3311,19 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 500 Data size: 89000 Basic stats:
COMPLETE Column stats: COMPLETE
Select Operator
- expressions: UDFToInteger(key) (type: int), CAST( key AS
decimal(5,2)) (type: decimal(5,2)), value (type: string)
+ expressions: UDFToInteger(key) (type: int), CAST( key AS
decimal(5,2)) (type: decimal(5,2)), CAST( value AS varchar(128)) (type:
varchar(128))
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 500 Data size: 103500 Basic stats:
COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 500 Data size: 164000 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col1 is not null and
_col2 is not null)) (type: boolean)
- Statistics: Num rows: 250 Data size: 51750 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), _col1 (type:
decimal(5,2)), CAST( _col2 AS varchar(128)) (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 250 Data size: 82000 Basic stats:
COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 250 Data size: 82000 Basic
stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 250 Data size: 82000 Basic
stats: COMPLETE Column stats: COMPLETE
- value expressions: _col1 (type: decimal(5,2)), _col2
(type: varchar(128))
+ value expressions: _col1 (type: decimal(5,2)), _col2
(type: varchar(128))
Execution mode: vectorized, llap
LLAP IO: all inputs
Reducer 2
@@ -3477,23 +3461,19 @@ STAGE PLANS:
Map-reduce partition columns: UDFToInteger(_col0)
(type: int)
Statistics: Num rows: 6 Data size: 456 Basic stats:
COMPLETE Column stats: COMPLETE
Select Operator
- expressions: _col1 (type: int), 3.14 (type:
decimal(3,2)), _col3 (type: varchar(128))
+ expressions: _col1 (type: int), 3.14 (type:
decimal(5,2)), _col3 (type: varchar(128))
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 6 Data size: 1350 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col1 is not null and
_col2 is not null)) (type: boolean)
Statistics: Num rows: 3 Data size: 675 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), CAST( _col1 AS
decimal(5,2)) (type: decimal(5,2)), _col2 (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 3 Data size: 675 Basic
stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 3 Data size: 675 Basic
stats: COMPLETE Column stats: COMPLETE
- value expressions: _col1 (type: decimal(5,2)),
_col2 (type: varchar(128))
+ value expressions: _col1 (type: decimal(5,2)),
_col2 (type: varchar(128))
Execution mode: vectorized, llap
LLAP IO: may be used (ACID table)
Reducer 2
@@ -3642,23 +3622,19 @@ STAGE PLANS:
Map-reduce partition columns: UDFToInteger(_col0)
(type: int)
Statistics: Num rows: 3 Data size: 228 Basic stats:
COMPLETE Column stats: COMPLETE
Select Operator
- expressions: _col1 (type: int), 3.14159 (type:
decimal(6,5)), _col3 (type: varchar(128))
+ expressions: _col1 (type: int), 3.14 (type:
decimal(5,2)), _col3 (type: varchar(128))
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 3 Data size: 675 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
predicate: enforce_constraint((_col0 is not null and
_col2 is not null)) (type: boolean)
Statistics: Num rows: 1 Data size: 225 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: _col0 (type: int), CAST( _col1 AS
decimal(5,2)) (type: decimal(5,2)), _col2 (type: varchar(128))
- outputColumnNames: _col0, _col1, _col2
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ null sort order: a
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 1 Data size: 225 Basic
stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- key expressions: _col0 (type: int)
- null sort order: a
- sort order: +
- Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 1 Data size: 225 Basic
stats: COMPLETE Column stats: COMPLETE
- value expressions: _col1 (type: decimal(5,2)),
_col2 (type: varchar(128))
+ value expressions: _col1 (type: decimal(5,2)),
_col2 (type: varchar(128))
Execution mode: vectorized, llap
LLAP IO: may be used (ACID table)
Reducer 2
@@ -3780,7 +3756,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 500 Data size: 89000 Basic stats:
COMPLETE Column stats: COMPLETE
Filter Operator
- predicate: ((key < 10) and
enforce_constraint((UDFToInteger(key) is not null and value is not null)))
(type: boolean)
+ predicate: ((key < 10) and
enforce_constraint((UDFToInteger(key) is not null and CAST( value AS
varchar(128)) is not null))) (type: boolean)
Statistics: Num rows: 83 Data size: 14774 Basic stats:
COMPLETE Column stats: COMPLETE
Select Operator
expressions: UDFToInteger(key) (type: int), CAST( key AS
decimal(5,2)) (type: decimal(5,2)), CAST( value AS varchar(128)) (type:
varchar(128))
@@ -6339,12 +6315,12 @@ STAGE PLANS:
Limit
Number of rows: 3
Statistics: Num rows: 3 Data size: 261 Basic stats: COMPLETE
Column stats: COMPLETE
- Filter Operator
- predicate: enforce_constraint(_col0 is not null) (type:
boolean)
- Statistics: Num rows: 1 Data size: 87 Basic stats:
COMPLETE Column stats: COMPLETE
- Select Operator
- expressions: UDFToInteger(_col0) (type: int)
- outputColumnNames: _col0
+ Select Operator
+ expressions: UDFToInteger(_col0) (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 3 Data size: 12 Basic stats:
COMPLETE Column stats: COMPLETE
+ Filter Operator
+ predicate: enforce_constraint(_col0 is not null) (type:
boolean)
Statistics: Num rows: 1 Data size: 4 Basic stats:
COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false