cxzl25 commented on code in PR #5141:
URL: https://github.com/apache/hive/pull/5141#discussion_r1542591859
##########
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java:
##########
@@ -1525,30 +1527,33 @@ public void visit(LeafNode node) throws MetaException {
if (isOpEquals || Operator.isNotEqualOperator(node.operator)) {
Map<String, String> partKeyToVal = new HashMap<>();
partKeyToVal.put(partCol.getName(), node.value.toString());
- String escapedNameFragment = Warehouse.makePartName(partKeyToVal,
false);
+ String escapedNameFragmentPart = Warehouse.makePartName(partKeyToVal,
false);
if (colType == FilterType.Date) {
// Some engines like Pig will record both date and time values, in
which case we need
// to match PART_NAME using a like clause.
- escapedNameFragment += "%";
+ escapedNameFragmentPart += "%";
}
+ StringBuilder sbFilter = new StringBuilder();
+ for (int i = 0; i < partitionKeys.size(); i++) {
+ FieldSchema col = partitionKeys.get(i);
+ if (i > 0) {
+ sbFilter.append(Path.SEPARATOR);
+ }
+ if (partCol.getName().equals(col.getName())) {
+ sbFilter.append(escapedNameFragmentPart);
+ } else {
+
sbFilter.append(FileUtils.escapePathName(col.getName().toLowerCase(), "_%"));
+ sbFilter.append('=');
+ sbFilter.append(FileUtils.escapePathName(null, "_%"));
+ }
+ }
+ String escapedNameFragment = sbFilter.toString();
if (colType != FilterType.Date && partColCount == 1) {
// Case where partition column type is not date and there are no
other partition columns
params.add(escapedNameFragment);
filter += " and " + PARTITIONS + ".\"PART_NAME\"" + (isOpEquals ? "
=? " : " !=? ");
} else {
- if (partColCount == 1) {
- // Case where partition column type is date and there is no other
partition columns
- params.add(escapedNameFragment);
- } else if (partColIndex + 1 == partColCount) {
- // Case where the partition column is at the end of the name.
- params.add("%/" + escapedNameFragment);
- } else if (partColIndex == 0) {
- // Case where the partition column is at the beginning of the name.
- params.add(escapedNameFragment + "/%");
- } else {
- // Case where the partition column is in the middle of the name.
- params.add("%/" + escapedNameFragment + "/%");
- }
+ params.add(escapedNameFragment);
filter += " and " + PARTITIONS + ".\"PART_NAME\"" + (isOpEquals ? "
like ? " : " not like ? ");
Review Comment:
```bash
cd standalone-metastore/metastore-tools/metastore-benchmarks/target/
java -jar ./hmsbench.jar -H localhost --savedata /tmp/benchdata --sanitize
-N 10 -N 1000 -N 10000 -d testbench_http -M 'getPartitionsByFilter.*'
--runMode NONACID
```
### master
`like 'date=d0/%'`
```
2024-03-28T09:06:14.035877Z 219 Query select "PARTITIONS"."PART_ID"
from "PARTITIONS" inner join "TBLS" on "PARTITIONS"."TBL_ID" = "TBLS"."TBL_ID"
and "TBLS"."TBL_NAME" = 'bench_table_csy' inner join "DBS" on
"TBLS"."DB_ID" = "DBS"."DB_ID" and "DBS"."NAME" = 'testbench_http' inner
join "PARTITION_KEY_VALS" "FILTER0" on "FILTER0"."PART_ID" =
"PARTITIONS"."PART_ID" and "FILTER0"."INTEGER_IDX" = 0 where "DBS"."CTLG_NAME"
= 'hive' and (("FILTER0"."PART_KEY_VAL" = 'd0' and "PARTITIONS"."PART_NAME"
like 'date=d0/%' )) order by "PART_NAME" asc
```
### PR
`like 'date=d0/hour=_%'`
```
2024-03-28T09:07:28.417080Z 231 Query select "PARTITIONS"."PART_ID"
from "PARTITIONS" inner join "TBLS" on "PARTITIONS"."TBL_ID" = "TBLS"."TBL_ID"
and "TBLS"."TBL_NAME" = 'bench_table_csy' inner join "DBS" on
"TBLS"."DB_ID" = "DBS"."DB_ID" and "DBS"."NAME" = 'testbench_http' inner
join "PARTITION_KEY_VALS" "FILTER0" on "FILTER0"."PART_ID" =
"PARTITIONS"."PART_ID" and "FILTER0"."INTEGER_IDX" = 0 where "DBS"."CTLG_NAME"
= 'hive' and (("FILTER0"."PART_KEY_VAL" = 'd0' and "PARTITIONS"."PART_NAME"
like 'date=d0/hour=_%' )) order by "PART_NAME" asc
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]