dongjoon-hyun commented on a change in pull request #33350:
URL: https://github.com/apache/spark/pull/33350#discussion_r671024091
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/PruneFileSourcePartitionsSuite.scala
##########
@@ -42,35 +43,27 @@ class PruneFileSourcePartitionsSuite extends PrunePartitionSuiteBase {
   test("PruneFileSourcePartitions should not change the output of LogicalRelation") {
     withTable("test") {
-      withTempDir { dir =>
-        sql(
-          s"""
-             |CREATE EXTERNAL TABLE test(i int)
-             |PARTITIONED BY (p int)
-             |STORED AS parquet
-             |LOCATION '${dir.toURI}'""".stripMargin)
-
-        val tableMeta = spark.sharedState.externalCatalog.getTable("default", "test")
-        val catalogFileIndex = new CatalogFileIndex(spark, tableMeta, 0)
-
-        val dataSchema = StructType(tableMeta.schema.filterNot { f =>
-          tableMeta.partitionColumnNames.contains(f.name)
-        })
-        val relation = HadoopFsRelation(
-          location = catalogFileIndex,
-          partitionSchema = tableMeta.partitionSchema,
-          dataSchema = dataSchema,
-          bucketSpec = None,
-          fileFormat = new ParquetFileFormat(),
-          options = Map.empty)(sparkSession = spark)
-
-        val logicalRelation = LogicalRelation(relation, tableMeta)
-        val query = Project(Seq(Symbol("i"), Symbol("p")),
-          Filter(Symbol("p") === 1, logicalRelation)).analyze
-
-        val optimized = Optimize.execute(query)
-        assert(optimized.missingInput.isEmpty)
-      }
+      spark.range(10).selectExpr("id", "id % 3 as p").write.partitionBy("p").saveAsTable("test")
Review comment:
FYI, `CREATE TABLE ... USING PARQUET` (Spark syntax) and `CREATE TABLE ... STORED AS PARQUET` (Hive syntax) generate different tables in Apache Spark.
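
For context, a minimal sketch of that difference (the table names `t_ds` and `t_hive` are hypothetical, and the Hive syntax assumes a Hive-enabled session, e.g. `enableHiveSupport()`):

```scala
// Run in spark-shell with Hive support; `spark` is the SparkSession.
// Spark DDL: partition column is declared in the column list,
// then referenced by name in PARTITIONED BY.
spark.sql("CREATE TABLE t_ds(i INT, p INT) USING PARQUET PARTITIONED BY (p)")

// Hive DDL: partition column is defined inside PARTITIONED BY itself.
spark.sql("CREATE TABLE t_hive(i INT) PARTITIONED BY (p INT) STORED AS PARQUET")

// The catalog records a different provider for each table:
//   t_ds   -> Provider: parquet  (Spark datasource table)
//   t_hive -> Provider: hive     (Hive SerDe table)
spark.sql("DESCRIBE TABLE EXTENDED t_ds").filter("col_name = 'Provider'").show(false)
spark.sql("DESCRIBE TABLE EXTENDED t_hive").filter("col_name = 'Provider'").show(false)
```

This is presumably the point of the FYI: the `saveAsTable` call in the rewritten test creates a Spark datasource table, whereas the old `CREATE EXTERNAL TABLE ... STORED AS parquet` DDL created a Hive table, so the test no longer covers exactly the same kind of table.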
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]