Github user jinxing64 commented on a diff in the pull request: https://github.com/apache/spark/pull/19602#discussion_r192311969 --- Diff: sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala --- @@ -207,65 +271,68 @@ class HiveClientSuite(version: String) } private def testMetastorePartitionFiltering( - filterString: String, + table: String, + filterExpr: Expression, expectedDs: Seq[Int], expectedH: Seq[Int], expectedChunks: Seq[String]): Unit = { testMetastorePartitionFiltering( - filterString, - (expectedDs, expectedH, expectedChunks) :: Nil, + table, + filterExpr, + Map("ds" -> expectedDs, "h" -> expectedH, "chunk" -> expectedChunks) :: Nil, identity) } private def testMetastorePartitionFiltering( - filterString: String, + table: String, + filterExpr: Expression, expectedDs: Seq[Int], expectedH: Seq[Int], expectedChunks: Seq[String], transform: Expression => Expression): Unit = { testMetastorePartitionFiltering( - filterString, - (expectedDs, expectedH, expectedChunks) :: Nil, + table, + filterExpr, + Map("ds" -> expectedDs, "h" -> expectedH, "chunk" -> expectedChunks) :: Nil, identity) } private def testMetastorePartitionFiltering( - filterString: String, - expectedPartitionCubes: Seq[(Seq[Int], Seq[Int], Seq[String])]): Unit = { - testMetastorePartitionFiltering(filterString, expectedPartitionCubes, identity) + table: String, + filterExpr: Expression, + expectedPartitionCubes: Seq[Map[String, Seq[Any]]]): Unit = { + testMetastorePartitionFiltering(table, filterExpr, expectedPartitionCubes, identity) } private def testMetastorePartitionFiltering( - filterString: String, - expectedPartitionCubes: Seq[(Seq[Int], Seq[Int], Seq[String])], + table: String, + filterExpr: Expression, + expectedPartitionCubes: Seq[Map[String, Seq[Any]]], transform: Expression => Expression): Unit = { - val filteredPartitions = client.getPartitionsByFilter(client.getTable("default", "test"), + val filteredPartitions = 
client.getPartitionsByFilter(client.getTable("default", table), Seq( - transform(parseExpression(filterString)) + transform(filterExpr) )) - val expectedPartitionCount = expectedPartitionCubes.map { - case (expectedDs, expectedH, expectedChunks) => - expectedDs.size * expectedH.size * expectedChunks.size - }.sum - - val expectedPartitions = expectedPartitionCubes.map { - case (expectedDs, expectedH, expectedChunks) => - for { - ds <- expectedDs - h <- expectedH - chunk <- expectedChunks - } yield Set( - "ds" -> ds.toString, - "h" -> h.toString, - "chunk" -> chunk - ) - }.reduce(_ ++ _) + val expectedPartitionCount = expectedPartitionCubes.map(_.map(_._2.size).product).sum + + val expectedPartitions = expectedPartitionCubes.map(getPartitionsFromCube(_)).reduce(_ ++ _) val actualFilteredPartitionCount = filteredPartitions.size assert(actualFilteredPartitionCount == expectedPartitionCount, s"Expected $expectedPartitionCount partitions but got $actualFilteredPartitionCount") - assert(filteredPartitions.map(_.spec.toSet).toSet == expectedPartitions.toSet) + assert(filteredPartitions.map(_.spec).toSet == expectedPartitions.toSet) + } + --- End diff -- This parses the cube into a list of partitions; the result looks like `Seq[Map[partition name, partition value]]`.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org