lirui-apache commented on a change in pull request #10325: [FLINK-14512][table] Introduce listPartitionsByFilter to Catalog
URL: https://github.com/apache/flink/pull/10325#discussion_r352442401
 
 

 ##########
 File path: flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/rules/logical/PushPartitionIntoTableSourceScanRule.scala
 ##########
 @@ -94,14 +104,47 @@ class PushPartitionIntoTableSourceScanRule extends RelOptRule(
       inputFieldType.getFieldList.get(index).getType
     }.map(FlinkTypeFactory.toLogicalType)
 
-    val allPartitions = tableSource.getPartitions
-    val remainingPartitions = PartitionPruner.prunePartitions(
-      call.getPlanner.getContext.asInstanceOf[FlinkContext].getTableConfig,
-      partitionFieldNames,
-      partitionFieldTypes,
-      allPartitions,
-      finalPartitionPredicate
-    )
+    def getAllPartitions: util.List[util.Map[String, String]] = {
+      catalog match {
+        case Some(c) =>
+          c.listPartitions(tableIdentifier.get.toObjectPath)
+            .map(_.getPartitionSpec).toList
+        case None => tableSource.getPartitions
+      }
+    }
+
+    def internalPartitionPrune(): util.List[util.Map[String, String]] = {
+      val allPartitions = getAllPartitions
+      PartitionPruner.prunePartitions(
+        config,
+        partitionFieldNames,
+        partitionFieldTypes,
+        allPartitions,
+        finalPartitionPredicate
+      )
+    }
+
+    val remainingPartitions: util.List[util.Map[String, String]] = catalog match {
+      case Some(catalog) =>
+        val converter = new RexNodeToExpressionConverter(
+          inputFields,
+          context.getFunctionCatalog,
+          context.getCatalogManager,
+          TimeZone.getTimeZone(config.getLocalTimeZone))
+        finalPartitionPredicate.accept(converter) match {
+          case Some(expression) =>
+            try {
+              catalog
+                  .listPartitionsByFilter(tableIdentifier.get.toObjectPath, Seq(expression))
+                  .map(_.getPartitionSpec)
+            } catch {
+              case _: UnsupportedOperationException =>
 
 Review comment:
   If we rely on the exception to fall back to the internal implementation, we'd better mention it in the API contract, so that the implementing class knows what to do if it doesn't support the API.
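
   A minimal sketch of the pattern the contract would need to spell out is below (not the PR's code): try `listPartitionsByFilter` first and, if the catalog signals "unsupported" by throwing `UnsupportedOperationException`, fall back to `listPartitions` plus caller-side pruning. The `listWithFallback` helper and its `pruneLocally` parameter are hypothetical; the Catalog calls follow the API used in the diff above, assuming the `listPartitionsByFilter(ObjectPath, List<Expression>)` signature this PR introduces.

```scala
// Sketch only: illustrates the exception-based fallback the review comment
// wants documented in the Catalog API contract. Helper names are made up.
import java.util.{List => JList, Map => JMap}

import org.apache.flink.table.catalog.{Catalog, ObjectPath}
import org.apache.flink.table.expressions.Expression

import scala.collection.JavaConverters._

object PartitionListingFallback {

  def listWithFallback(
      catalog: Catalog,
      tablePath: ObjectPath,
      filters: Seq[Expression],
      pruneLocally: JList[JMap[String, String]] => JList[JMap[String, String]])
    : JList[JMap[String, String]] = {
    try {
      // Fast path: the catalog evaluates the partition filters itself.
      catalog
        .listPartitionsByFilter(tablePath, filters.asJava)
        .asScala
        .map(_.getPartitionSpec)
        .asJava
    } catch {
      case _: UnsupportedOperationException =>
        // Documented escape hatch: the catalog does not support filtered
        // listing, so list everything and prune on the caller side.
        val allSpecs = catalog
          .listPartitions(tablePath)
          .asScala
          .map(_.getPartitionSpec)
          .asJava
        pruneLocally(allSpecs)
    }
  }
}
```

   If the exception route is kept, stating it in the Javadoc makes throwing `UnsupportedOperationException` the explicit way for an implementation to opt out, rather than an undocumented side effect callers happen to rely on.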

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services
