This is an automated email from the ASF dual-hosted git repository.
dkuzmenko pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 4c149361fdf HIVE-27775: DirectSQL and JDO results are different when
fetching partitions by timestamp in DST shift (Zhihua Deng, reviewed by Denys
Kuzmenko)
4c149361fdf is described below
commit 4c149361fdff851bd824c1abbd11b4b0f98974d5
Author: dengzh <[email protected]>
AuthorDate: Fri Mar 1 20:57:30 2024 +0800
HIVE-27775: DirectSQL and JDO results are different when fetching
partitions by timestamp in DST shift (Zhihua Deng, reviewed by Denys Kuzmenko)
Closes #4959
---
.../ql/metadata/SessionHiveMetaStoreClient.java | 5 +-
.../hadoop/hive/ql/parse/BaseSemanticAnalyzer.java | 3 +-
.../queries/clientpositive/partition_timestamp3.q | 6 +
.../clientpositive/llap/partition_timestamp3.q.out | 48 ++++++
.../hive/metastore/utils/MetaStoreUtils.java | 10 ++
.../hadoop/hive/metastore/DatabaseProduct.java | 4 +-
.../hadoop/hive/metastore/MetaStoreDirectSql.java | 60 +++----
.../apache/hadoop/hive/metastore/ObjectStore.java | 5 +-
.../hive/metastore/parser/ExpressionTree.java | 182 +++++++++++----------
.../hive/metastore/parser/PartFilterVisitor.java | 12 +-
.../hive/metastore/TestPartFilterExprUtil.java | 16 +-
11 files changed, 210 insertions(+), 141 deletions(-)
diff --git
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
index 0e3dfb281b4..ce725a5cdb3 100644
---
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
+++
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
@@ -1705,10 +1705,9 @@ public class SessionHiveMetaStoreClient extends
HiveMetaStoreClientWithLocalCach
assert table != null;
ExpressionTree.FilterBuilder filterBuilder = new
ExpressionTree.FilterBuilder(true);
Map<String, Object> params = new HashMap<>();
- exprTree.generateJDOFilterFragment(conf, params, filterBuilder,
table.getPartitionKeys());
+ exprTree.accept(new ExpressionTree.JDOFilterGenerator(conf,
+ table.getPartitionKeys(), filterBuilder, params));
StringBuilder stringBuilder = new StringBuilder(filterBuilder.getFilter());
- // replace leading &&
- stringBuilder.replace(0, 4, "");
params.entrySet().stream().forEach(e -> {
int index = stringBuilder.indexOf(e.getKey());
stringBuilder.replace(index, index + e.getKey().length(), "\"" +
e.getValue().toString() + "\"");
diff --git
a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 773cafd01c6..54b6587ba99 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -1803,8 +1803,7 @@ public abstract class BaseSemanticAnalyzer {
throw new SemanticException("Unexpected date type " +
colValue.getClass());
}
try {
- return MetaStoreUtils.convertDateToString(
- MetaStoreUtils.convertStringToDate(value.toString()));
+ return MetaStoreUtils.normalizeDate(value.toString());
} catch (Exception e) {
throw new SemanticException(e);
}
diff --git a/ql/src/test/queries/clientpositive/partition_timestamp3.q
b/ql/src/test/queries/clientpositive/partition_timestamp3.q
new file mode 100644
index 00000000000..b408848d622
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/partition_timestamp3.q
@@ -0,0 +1,6 @@
+--! qt:timezone:Europe/Paris
+DROP TABLE IF EXISTS payments;
+CREATE EXTERNAL TABLE payments (card string) PARTITIONED BY(txn_datetime
TIMESTAMP) STORED AS ORC;
+INSERT into payments VALUES('3333-4444-2222-9999', '2023-03-26 02:30:00'),
('3333-4444-2222-9999', '2023-03-26 03:30:00');
+SELECT * FROM payments WHERE txn_datetime = '2023-03-26 02:30:00';
+SELECT * FROM payments WHERE txn_datetime = '2023-03-26 03:30:00';
diff --git a/ql/src/test/results/clientpositive/llap/partition_timestamp3.q.out
b/ql/src/test/results/clientpositive/llap/partition_timestamp3.q.out
new file mode 100644
index 00000000000..847ec070fab
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/partition_timestamp3.q.out
@@ -0,0 +1,48 @@
+PREHOOK: query: DROP TABLE IF EXISTS payments
+PREHOOK: type: DROPTABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: DROP TABLE IF EXISTS payments
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: database:default
+PREHOOK: query: CREATE EXTERNAL TABLE payments (card string) PARTITIONED
BY(txn_datetime TIMESTAMP) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@payments
+POSTHOOK: query: CREATE EXTERNAL TABLE payments (card string) PARTITIONED
BY(txn_datetime TIMESTAMP) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@payments
+PREHOOK: query: INSERT into payments VALUES('3333-4444-2222-9999', '2023-03-26
02:30:00'), ('3333-4444-2222-9999', '2023-03-26 03:30:00')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@payments
+POSTHOOK: query: INSERT into payments VALUES('3333-4444-2222-9999',
'2023-03-26 02:30:00'), ('3333-4444-2222-9999', '2023-03-26 03:30:00')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@payments
+POSTHOOK: Output: default@payments@txn_datetime=2023-03-26 02%3A30%3A00
+POSTHOOK: Output: default@payments@txn_datetime=2023-03-26 03%3A30%3A00
+POSTHOOK: Lineage: payments PARTITION(txn_datetime=2023-03-26 02:30:00).card
SCRIPT []
+POSTHOOK: Lineage: payments PARTITION(txn_datetime=2023-03-26 03:30:00).card
SCRIPT []
+PREHOOK: query: SELECT * FROM payments WHERE txn_datetime = '2023-03-26
02:30:00'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@payments
+PREHOOK: Input: default@payments@txn_datetime=2023-03-26 02%3A30%3A00
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM payments WHERE txn_datetime = '2023-03-26
02:30:00'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@payments
+POSTHOOK: Input: default@payments@txn_datetime=2023-03-26 02%3A30%3A00
+#### A masked pattern was here ####
+3333-4444-2222-9999 2023-03-26 02:30:00
+PREHOOK: query: SELECT * FROM payments WHERE txn_datetime = '2023-03-26
03:30:00'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@payments
+PREHOOK: Input: default@payments@txn_datetime=2023-03-26 03%3A30%3A00
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM payments WHERE txn_datetime = '2023-03-26
03:30:00'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@payments
+POSTHOOK: Input: default@payments@txn_datetime=2023-03-26 03%3A30%3A00
+#### A masked pattern was here ####
+3333-4444-2222-9999 2023-03-26 03:30:00
diff --git
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
index 1aa98d9f1d1..c12c009b81c 100644
---
a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
+++
b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
@@ -107,6 +107,16 @@ public class MetaStoreUtils {
return java.sql.Date.valueOf(val);
}
+ /**
+ * Converts the string format date without a time-zone to
+ * a time-zone based string format date
+ * @param date the date without a time-zone
+ * @return time-zone based string format date
+ */
+ public static String normalizeDate(String date) {
+ return convertDateToString(convertStringToDate(date));
+ }
+
/**
* Converts java.sql.Timestamp to string format timestamp.
* @param timestamp java.sql.Timestamp object.
diff --git
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/DatabaseProduct.java
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/DatabaseProduct.java
index dd0f2434207..865365722fc 100644
---
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/DatabaseProduct.java
+++
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/DatabaseProduct.java
@@ -270,7 +270,9 @@ public class DatabaseProduct implements Configurable {
protected String toTimestamp(String tableValue) {
if (isORACLE()) {
- return "TO_TIMESTAMP(" + tableValue + ", 'YYYY-MM-DD HH:mm:ss')";
+ return "TO_TIMESTAMP(" + tableValue + ", 'YYYY-MM-DD HH24:mi:ss')";
+ } else if (isSQLSERVER()) {
+ return "CONVERT(DATETIME, " + tableValue + ")";
} else {
return "cast(" + tableValue + " as TIMESTAMP)";
}
diff --git
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index 515721791bb..c453df0ea1b 100644
---
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -30,13 +30,10 @@ import static
org.apache.hadoop.hive.metastore.ColumnType.STRING_TYPE_NAME;
import static org.apache.hadoop.hive.metastore.ColumnType.TIMESTAMP_TYPE_NAME;
import static org.apache.hadoop.hive.metastore.ColumnType.TINYINT_TYPE_NAME;
import static org.apache.hadoop.hive.metastore.ColumnType.VARCHAR_TYPE_NAME;
-import static
org.apache.hadoop.hive.metastore.utils.FileUtils.unescapePathName;
import java.sql.Connection;
-import java.sql.Date;
import java.sql.SQLException;
import java.sql.Statement;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -1405,7 +1402,7 @@ class MetaStoreDirectSql {
@Override
public void visit(LeafNode node) throws MetaException {
int partColCount = partitionKeys.size();
- int partColIndex = node.getPartColIndexForFilter(partitionKeys,
filterBuffer);
+ int partColIndex = LeafNode.getPartColIndexForFilter(node.keyName,
partitionKeys, filterBuffer);
if (filterBuffer.hasError()) {
return;
}
@@ -1424,30 +1421,33 @@ class MetaStoreDirectSql {
return;
}
+ String nodeValue0 = "?";
// if Filter.g does date parsing for quoted strings, we'd need to verify
there's no
// type mismatch when string col is filtered by a string that looks like
date.
- if (colType == FilterType.Date && valType == FilterType.String) {
- // Filter.g cannot parse a quoted date; try to parse date here too.
+ if (colType == FilterType.Date) {
try {
- nodeValue = MetaStoreUtils.convertStringToDate((String)nodeValue);
+ nodeValue = MetaStoreUtils.normalizeDate((String) nodeValue);
valType = FilterType.Date;
- } catch (Exception pe) { // do nothing, handled below - types will
mismatch
+ if (dbType.isPOSTGRES() || dbType.isORACLE()) {
+ nodeValue0 = "date '" + nodeValue + "'";
+ nodeValue = null;
+ }
+ } catch (Exception e) { // do nothing, handled below - types will
mismatch
+ }
+ } else if (colType == FilterType.Timestamp) {
+ if (dbType.isDERBY() || dbType.isMYSQL()) {
+ filterBuffer.setError("Filter pushdown on timestamp not supported
for " + dbType.dbType);
+ return;
+ }
+ try {
+ MetaStoreUtils.convertStringToTimestamp((String) nodeValue);
+ valType = FilterType.Timestamp;
+ if (dbType.isPOSTGRES() || dbType.isORACLE()) {
+ nodeValue0 = "timestamp '" + nodeValue + "'";
+ nodeValue = null;
+ }
+ } catch (Exception e) { //nodeValue could be
'__HIVE_DEFAULT_PARTITION__'
}
- }
-
- if (colType == FilterType.Timestamp && valType == FilterType.String) {
- // timestamp value may be escaped in client side, so we need unescape
it here.
- nodeValue =
MetaStoreUtils.convertStringToTimestamp(unescapePathName((String) nodeValue));
- valType = FilterType.Timestamp;
- }
-
- // We format it so we are sure we are getting the right value
- if (valType == FilterType.Date) {
- // Format
- nodeValue = MetaStoreUtils.convertDateToString((Date)nodeValue);
- } else if (valType == FilterType.Timestamp) {
- //format
- nodeValue = MetaStoreUtils.convertTimestampToString((Timestamp)
nodeValue);
}
boolean isDefaultPartition = (valType == FilterType.String) &&
defaultPartName.equals(nodeValue);
@@ -1477,8 +1477,7 @@ class MetaStoreDirectSql {
// Build the filter and add parameters linearly; we are traversing leaf
nodes LTR.
String tableValue = "\"FILTER" + partColIndex + "\".\"PART_KEY_VAL\"";
- String nodeValue0 = "?";
- if (node.isReverseOrder) {
+ if (node.isReverseOrder && nodeValue != null) {
params.add(nodeValue);
}
String tableColumn = tableValue;
@@ -1508,14 +1507,9 @@ class MetaStoreDirectSql {
params.add(catName.toLowerCase());
}
tableValue += " then " + tableValue0 + " else null end)";
-
- if (valType == FilterType.Date) {
- tableValue = dbType.toDate(tableValue);
- } else if (valType == FilterType.Timestamp) {
- tableValue = dbType.toTimestamp(tableValue);
- }
}
- if (!node.isReverseOrder) {
+
+ if (!node.isReverseOrder && nodeValue != null) {
params.add(nodeValue);
}
@@ -1530,7 +1524,7 @@ class MetaStoreDirectSql {
boolean isOpEquals = Operator.isEqualOperator(node.operator);
if (isOpEquals || Operator.isNotEqualOperator(node.operator)) {
Map<String, String> partKeyToVal = new HashMap<>();
- partKeyToVal.put(partCol.getName(), nodeValue.toString());
+ partKeyToVal.put(partCol.getName(), node.value.toString());
String escapedNameFragment = Warehouse.makePartName(partKeyToVal,
false);
if (colType == FilterType.Date) {
// Some engines like Pig will record both date and time values, in
which case we need
diff --git
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 960f71803f4..a810c9cc695 100644
---
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -4914,7 +4914,8 @@ public class ObjectStore implements RawStore,
Configurable {
params.put("catName", catName);
}
- tree.generateJDOFilterFragment(getConf(), params, queryBuilder, table !=
null ? table.getPartitionKeys() : null);
+ tree.accept(new ExpressionTree.JDOFilterGenerator(getConf(),
+ table != null ? table.getPartitionKeys() : null, queryBuilder,
params));
if (queryBuilder.hasError()) {
assert !isValidatedFilter;
LOG.debug("JDO filter pushdown cannot be used: {}",
queryBuilder.getErrorMessage());
@@ -4934,7 +4935,7 @@ public class ObjectStore implements RawStore,
Configurable {
params.put("t1", tblName);
params.put("t2", dbName);
params.put("t3", catName);
- tree.generateJDOFilterFragment(getConf(), params, queryBuilder,
partitionKeys);
+ tree.accept(new ExpressionTree.JDOFilterGenerator(getConf(),
partitionKeys, queryBuilder, params));
if (queryBuilder.hasError()) {
assert !isValidatedFilter;
LOG.debug("JDO filter pushdown cannot be used: {}",
queryBuilder.getErrorMessage());
diff --git
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
index 2e325a4e431..07acf2adcd6 100644
---
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
+++
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
@@ -34,6 +34,7 @@ import
org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
/**
@@ -227,34 +228,6 @@ public class ExpressionTree {
visitor.visit(this);
}
- /**
- * Generates a JDO filter statement
- * @param params
- * A map of parameter key to values for the filter statement.
- * @param filterBuffer The filter builder that is used to build filter.
- * @param partitionKeys
- * @throws MetaException
- */
- public void generateJDOFilter(Configuration conf,
- Map<String, Object> params, FilterBuilder
filterBuffer, List<FieldSchema> partitionKeys) throws MetaException {
- if (filterBuffer.hasError()) return;
- if (lhs != null) {
- filterBuffer.append (" (");
- lhs.generateJDOFilter(conf, params, filterBuffer, partitionKeys);
-
- if (rhs != null) {
- if( andOr == LogicalOperator.AND ) {
- filterBuffer.append(" && ");
- } else {
- filterBuffer.append(" || ");
- }
-
- rhs.generateJDOFilter(conf, params, filterBuffer, partitionKeys);
- }
- filterBuffer.append (") ");
- }
- }
-
@Override
public String toString() {
return "TreeNode{" +
@@ -271,10 +244,11 @@ public class ExpressionTree {
public static class LeafNode extends TreeNode {
public String keyName;
public Operator operator;
- /** Constant expression side of the operator. Can currently be a String or
a Long. */
+ /**
+ * Constant expression side of the operator. Can currently be a String or
a Long.
+ */
public Object value;
public boolean isReverseOrder = false;
- private static final String PARAM_PREFIX = "hive_filter_param_";
@Override
protected void accept(TreeVisitor visitor) throws MetaException {
@@ -282,8 +256,92 @@ public class ExpressionTree {
}
@Override
- public void generateJDOFilter(Configuration conf, Map<String, Object>
params,
- FilterBuilder filterBuilder,
List<FieldSchema> partitionKeys) throws MetaException {
+ public String toString() {
+ return "LeafNode{" +
+ "keyName='" + keyName + '\'' +
+ ", operator='" + operator + '\'' +
+ ", value=" + value +
+ (isReverseOrder ? ", isReverseOrder=true" : "") +
+ '}';
+ }
+
+ /**
+ * Get partition column index in the table partition column list that
+ * corresponds to the key that is being filtered on by this tree node.
+ * @param partitionKeys list of partition keys.
+ * @param filterBuilder filter builder used to report error, if any.
+ * @return The index.
+ */
+ public static int getPartColIndexForFilter(String partitionKeyName,
+ List<FieldSchema> partitionKeys, FilterBuilder filterBuilder) throws
MetaException {
+ int partitionColumnIndex = Iterables.indexOf(partitionKeys, key ->
partitionKeyName.equalsIgnoreCase(key.getName()));
+ if( partitionColumnIndex < 0) {
+ filterBuilder.setError("Specified key <" + partitionKeyName +
+ "> is not a partitioning key for the table");
+ return -1;
+ }
+ return partitionColumnIndex;
+ }
+ }
+
+ /**
+ * Generate the JDOQL filter for the given expression tree
+ */
+ public static class JDOFilterGenerator extends TreeVisitor {
+
+ private static final String PARAM_PREFIX = "hive_filter_param_";
+
+ private Configuration conf;
+ private List<FieldSchema> partitionKeys;
+ // the filter builder to append to.
+ private FilterBuilder filterBuilder;
+ // the input map which is updated with the parameterized values.
+ // Keys are the parameter names and values are the parameter values
+ private Map<String, Object> params;
+ private boolean onParsing = false;
+ private String keyName;
+ private Object value;
+ private Operator operator;
+ private boolean isReverseOrder;
+
+ public JDOFilterGenerator(Configuration conf, List<FieldSchema>
partitionKeys,
+ FilterBuilder filterBuilder, Map<String, Object> params) {
+ this.conf = conf;
+ this.partitionKeys = partitionKeys;
+ this.filterBuilder = filterBuilder;
+ this.params = params;
+ }
+
+ private void beforeParsing() throws MetaException {
+ if (!onParsing && !filterBuilder.getFilter().isEmpty()) {
+ filterBuilder.append(" && ");
+ }
+ onParsing = true;
+ }
+
+ @Override
+ protected void beginTreeNode(TreeNode node) throws MetaException {
+ beforeParsing();
+ filterBuilder.append("( ");
+ }
+
+ @Override
+ protected void midTreeNode(TreeNode node) throws MetaException {
+ filterBuilder.append((node.getAndOr() == LogicalOperator.AND) ? " && " :
" || ");
+ }
+
+ @Override
+ protected void endTreeNode(TreeNode node) throws MetaException {
+ filterBuilder.append(") ");
+ }
+
+ @Override
+ protected void visit(LeafNode node) throws MetaException {
+ beforeParsing();
+ keyName = node.keyName;
+ operator = node.operator;
+ value = node.value;
+ isReverseOrder = node.isReverseOrder;
if (partitionKeys != null) {
generateJDOFilterOverPartitions(conf, params, filterBuilder,
partitionKeys);
} else {
@@ -291,6 +349,11 @@ public class ExpressionTree {
}
}
+ @Override
+ protected boolean shouldStop() {
+ return filterBuilder.hasError();
+ }
+
//can only support "=" and "!=" for now, because our JDO lib is buggy when
// using objects from map.get()
private static final Set<Operator> TABLE_FILTER_OPS = Sets.newHashSet(
@@ -368,7 +431,7 @@ public class ExpressionTree {
private void generateJDOFilterOverPartitions(Configuration conf,
Map<String, Object> params,
FilterBuilder filterBuilder, List<FieldSchema> partitionKeys) throws
MetaException {
int partitionColumnCount = partitionKeys.size();
- int partitionColumnIndex = getPartColIndexForFilter(partitionKeys,
filterBuilder);
+ int partitionColumnIndex = LeafNode.getPartColIndexForFilter(keyName,
partitionKeys, filterBuilder);
if (filterBuilder.hasError()) return;
boolean canPushDownIntegral =
@@ -442,32 +505,6 @@ public class ExpressionTree {
|| (operator == Operator.NOTEQUALS2);
}
- /**
- * Get partition column index in the table partition column list that
- * corresponds to the key that is being filtered on by this tree node.
- * @param partitionKeys list of partition keys.
- * @param filterBuilder filter builder used to report error, if any.
- * @return The index.
- */
- public int getPartColIndexForFilter(
- List<FieldSchema> partitionKeys, FilterBuilder filterBuilder) throws
MetaException {
- assert (partitionKeys.size() > 0);
- int partitionColumnIndex;
- for (partitionColumnIndex = 0; partitionColumnIndex <
partitionKeys.size();
- ++partitionColumnIndex) {
- if
(partitionKeys.get(partitionColumnIndex).getName().equalsIgnoreCase(keyName)) {
- break;
- }
- }
- if( partitionColumnIndex == partitionKeys.size()) {
- filterBuilder.setError("Specified key <" + keyName +
- "> is not a partitioning key for the table");
- return -1;
- }
-
- return partitionColumnIndex;
- }
-
/**
* Validates and gets the query parameter for JDO filter pushdown based on
the column
* and the constant stored in this node.
@@ -507,16 +544,6 @@ public class ExpressionTree {
return isStringValue ? (String)val : Long.toString((Long)val);
}
-
- @Override
- public String toString() {
- return "LeafNode{" +
- "keyName='" + keyName + '\'' +
- ", operator='" + operator + '\'' +
- ", value=" + value +
- (isReverseOrder ? ", isReverseOrder=true" : "") +
- '}';
- }
}
public void accept(TreeVisitor treeVisitor) throws MetaException {
@@ -626,21 +653,4 @@ public class ExpressionTree {
nodeStack.push(newNode);
}
- /** Generate the JDOQL filter for the given expression tree
- * @param params the input map which is updated with the
- * the parameterized values. Keys are the parameter names and values
- * are the parameter values
- * @param filterBuilder the filter builder to append to.
- * @param partitionKeys
- */
- public void generateJDOFilterFragment(Configuration conf,
- Map<String, Object> params,
FilterBuilder filterBuilder, List<FieldSchema> partitionKeys) throws
MetaException {
- if (root == null) {
- return;
- }
-
- filterBuilder.append(" && ( ");
- root.generateJDOFilter(conf, params, filterBuilder, partitionKeys);
- filterBuilder.append(" )");
- }
}
diff --git
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/PartFilterVisitor.java
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/PartFilterVisitor.java
index 5d68d593c83..1ce0b27eaba 100644
---
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/PartFilterVisitor.java
+++
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/parser/PartFilterVisitor.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hive.metastore.parser;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.List;
@@ -231,25 +229,27 @@ public class PartFilterVisitor extends
PartitionFilterBaseVisitor<Object> {
}
@Override
- public Date visitDateLiteral(PartitionFilterParser.DateLiteralContext ctx) {
+ public String visitDateLiteral(PartitionFilterParser.DateLiteralContext ctx)
{
PartitionFilterParser.DateContext date = ctx.date();
String dateValue = unquoteString(date.value.getText());
try {
- return MetaStoreUtils.convertStringToDate(dateValue);
+ MetaStoreUtils.convertStringToDate(dateValue);
} catch (DateTimeParseException e) {
throw new ParseCancellationException(e.getMessage());
}
+ return dateValue;
}
@Override
- public Timestamp
visitTimestampLiteral(PartitionFilterParser.TimestampLiteralContext ctx) {
+ public String
visitTimestampLiteral(PartitionFilterParser.TimestampLiteralContext ctx) {
PartitionFilterParser.TimestampContext timestamp = ctx.timestamp();
String timestampValue = unquoteString(timestamp.value.getText());
try {
- return MetaStoreUtils.convertStringToTimestamp(timestampValue);
+ MetaStoreUtils.convertStringToTimestamp(timestampValue);
} catch (DateTimeParseException e) {
throw new ParseCancellationException(e.getMessage());
}
+ return timestampValue;
}
@Override
diff --git
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestPartFilterExprUtil.java
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestPartFilterExprUtil.java
index df1666ae530..f4a7cfd58da 100644
---
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestPartFilterExprUtil.java
+++
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestPartFilterExprUtil.java
@@ -104,25 +104,25 @@ public class TestPartFilterExprUtil {
@Test
public void
testSingleColInExpressionWhenTimestampLiteralTypeIsNotSpecifiedNorQuoted()
throws MetaException {
checkFilter("(dt) IN (2000-01-01 01:00:00, 2000-01-01 01:42:00)",
- "TreeNode{lhs=LeafNode{keyName='dt', operator='=', value=2000-01-01
01:00:00.0}, andOr='OR', rhs=LeafNode{keyName='dt', operator='=',
value=2000-01-01 01:42:00.0}}");
+ "TreeNode{lhs=LeafNode{keyName='dt', operator='=', value=2000-01-01
01:00:00}, andOr='OR', rhs=LeafNode{keyName='dt', operator='=',
value=2000-01-01 01:42:00}}");
}
@Test
public void testSingleColInExpressionWhenTimestampLiteralTypeIsSpecified()
throws MetaException {
checkFilter("(j) IN (TIMESTAMP'2000-01-01 01:00:00', TIMESTAMP'2000-01-01
01:42:00')",
- "TreeNode{lhs=LeafNode{keyName='j', operator='=', value=2000-01-01
01:00:00.0}, andOr='OR', rhs=LeafNode{keyName='j', operator='=',
value=2000-01-01 01:42:00.0}}");
+ "TreeNode{lhs=LeafNode{keyName='j', operator='=', value=2000-01-01
01:00:00}, andOr='OR', rhs=LeafNode{keyName='j', operator='=', value=2000-01-01
01:42:00}}");
}
@Test
public void
testMultiColInExpressionWhenTimestampLiteralTypeIsNotSpecifiedNorQuoted()
throws MetaException {
checkFilter("(struct(ds1,ds2)) IN (struct(2000-05-08 01:00:00, 2001-04-08
01:00:00), struct(2000-05-09 01:00:00, 2001-04-09 01:00:00))",
- "TreeNode{lhs=TreeNode{lhs=LeafNode{keyName='ds1', operator='=',
value=2000-05-08 01:00:00.0}, andOr='AND', rhs=LeafNode{keyName='ds2',
operator='=', value=2001-04-08 01:00:00.0}}, andOr='OR',
rhs=TreeNode{lhs=LeafNode{keyName='ds1', operator='=', value=2000-05-09
01:00:00.0}, andOr='AND', rhs=LeafNode{keyName='ds2', operator='=',
value=2001-04-09 01:00:00.0}}}");
+ "TreeNode{lhs=TreeNode{lhs=LeafNode{keyName='ds1', operator='=',
value=2000-05-08 01:00:00}, andOr='AND', rhs=LeafNode{keyName='ds2',
operator='=', value=2001-04-08 01:00:00}}, andOr='OR',
rhs=TreeNode{lhs=LeafNode{keyName='ds1', operator='=', value=2000-05-09
01:00:00}, andOr='AND', rhs=LeafNode{keyName='ds2', operator='=',
value=2001-04-09 01:00:00}}}");
}
@Test
public void testMultiColInExpressionWhenTimestampLiteralTypeIsSpecified()
throws MetaException {
checkFilter("(struct(ds1,ds2)) IN (struct(TIMESTAMP'2000-05-08
01:00:00',TIMESTAMP'2001-04-08 01:00:00'), struct(TIMESTAMP'2000-05-09
01:00:00',TIMESTAMP'2001-04-09 01:00:00'))",
- "TreeNode{lhs=TreeNode{lhs=LeafNode{keyName='ds1', operator='=',
value=2000-05-08 01:00:00.0}, andOr='AND', rhs=LeafNode{keyName='ds2',
operator='=', value=2001-04-08 01:00:00.0}}, andOr='OR',
rhs=TreeNode{lhs=LeafNode{keyName='ds1', operator='=', value=2000-05-09
01:00:00.0}, andOr='AND', rhs=LeafNode{keyName='ds2', operator='=',
value=2001-04-09 01:00:00.0}}}");
+ "TreeNode{lhs=TreeNode{lhs=LeafNode{keyName='ds1', operator='=',
value=2000-05-08 01:00:00}, andOr='AND', rhs=LeafNode{keyName='ds2',
operator='=', value=2001-04-08 01:00:00}}, andOr='OR',
rhs=TreeNode{lhs=LeafNode{keyName='ds1', operator='=', value=2000-05-09
01:00:00}, andOr='AND', rhs=LeafNode{keyName='ds2', operator='=',
value=2001-04-09 01:00:00}}}");
}
@Test
@@ -140,13 +140,13 @@ public class TestPartFilterExprUtil {
@Test
public void
testBetweenExpressionWhenTimestampLiteralTypeIsNotSpecifiedNorQuoted() throws
MetaException {
checkFilter("dt BETWEEN 2000-01-01 01:00:00 AND 2000-01-01 01:42:00)",
- "TreeNode{lhs=LeafNode{keyName='dt', operator='>=', value=2000-01-01
01:00:00.0}, andOr='AND', rhs=LeafNode{keyName='dt', operator='<=',
value=2000-01-01 01:42:00.0}}");
+ "TreeNode{lhs=LeafNode{keyName='dt', operator='>=', value=2000-01-01
01:00:00}, andOr='AND', rhs=LeafNode{keyName='dt', operator='<=',
value=2000-01-01 01:42:00}}");
}
@Test
public void testBetweenExpressionWhenTimestampLiteralTypeIsSpecified()
throws MetaException {
checkFilter("dt BETWEEN TIMESTAMP'2000-01-01 01:00:00' AND
TIMESTAMP'2000-01-01 01:42:00')",
- "TreeNode{lhs=LeafNode{keyName='dt', operator='>=', value=2000-01-01
01:00:00.0}, andOr='AND', rhs=LeafNode{keyName='dt', operator='<=',
value=2000-01-01 01:42:00.0}}");
+ "TreeNode{lhs=LeafNode{keyName='dt', operator='>=', value=2000-01-01
01:00:00}, andOr='AND', rhs=LeafNode{keyName='dt', operator='<=',
value=2000-01-01 01:42:00}}");
}
@Test
@@ -164,13 +164,13 @@ public class TestPartFilterExprUtil {
@Test
public void
testBinaryExpressionWhenTimeStampLiteralTypeIsNotSpecifiedNorQuoted() throws
MetaException {
checkFilter("(j = 1990-11-10 01:00:00 or j = 1990-11-11 01:00:24 and j =
1990-11-12 01:42:00)",
- "TreeNode{lhs=LeafNode{keyName='j', operator='=', value=1990-11-10
01:00:00.0}, andOr='OR', rhs=TreeNode{lhs=LeafNode{keyName='j', operator='=',
value=1990-11-11 01:00:24.0}, andOr='AND', rhs=LeafNode{keyName='j',
operator='=', value=1990-11-12 01:42:00.0}}}");
+ "TreeNode{lhs=LeafNode{keyName='j', operator='=', value=1990-11-10
01:00:00}, andOr='OR', rhs=TreeNode{lhs=LeafNode{keyName='j', operator='=',
value=1990-11-11 01:00:24}, andOr='AND', rhs=LeafNode{keyName='j',
operator='=', value=1990-11-12 01:42:00}}}");
}
@Test
public void testBinaryExpressionWhenTimeStampLiteralTypeIsSpecified() throws
MetaException {
checkFilter("(j = TIMESTAMP'1990-11-10 01:00:00' or j =
TIMESTAMP'1990-11-11 01:00:24' and j = TIMESTAMP'1990-11-12 01:42:00')",
- "TreeNode{lhs=LeafNode{keyName='j', operator='=', value=1990-11-10
01:00:00.0}, andOr='OR', rhs=TreeNode{lhs=LeafNode{keyName='j', operator='=',
value=1990-11-11 01:00:24.0}, andOr='AND', rhs=LeafNode{keyName='j',
operator='=', value=1990-11-12 01:42:00.0}}}");
+ "TreeNode{lhs=LeafNode{keyName='j', operator='=', value=1990-11-10
01:00:00}, andOr='OR', rhs=TreeNode{lhs=LeafNode{keyName='j', operator='=',
value=1990-11-11 01:00:24}, andOr='AND', rhs=LeafNode{keyName='j',
operator='=', value=1990-11-12 01:42:00}}}");
}