dongjoon-hyun commented on code in PR #38277:
URL: https://github.com/apache/spark/pull/38277#discussion_r1000419046
##########
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeReadWriteSuite.scala:
##########
@@ -218,4 +221,48 @@ class HiveSerDeReadWriteSuite extends QueryTest with SQLTestUtils with TestHiveS
checkAnswer(spark.table("t1"), Seq(Row(Array("SPARK-34512", "HIVE-24797"))))
}
}
+
+ test("SPARK-40815: Read SymlinkTextInputFormat") {
+ withTable("t") {
+ withTempDir { root =>
+ val dataPath = new File(root, "data")
+ val symlinkPath = new File(root, "symlink")
+
+ spark.range(10).selectExpr("cast(id as string) as value")
+ .repartition(4).write.text(dataPath.getAbsolutePath)
+
+ // Generate symlink manifest file.
+ val files = dataPath.listFiles().filter(_.getName.endsWith(".txt"))
+ assert(files.length > 0)
+
+ symlinkPath.mkdir()
+ Files.write(
+ new File(symlinkPath, "symlink.txt").toPath,
+ files.mkString("\n").getBytes(StandardCharsets.UTF_8)
+ )
+
+ sql(s"""
+ CREATE TABLE t (id bigint)
+ STORED AS
+ INPUTFORMAT 'org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat'
+      OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+      LOCATION '${symlinkPath.getAbsolutePath}';
+ """)
+
+ checkAnswer(
+ sql("SELECT id FROM t ORDER BY id ASC"),
+ (0 until 10).map(Row(_))
+ )
+
+      // Verify that with the flag disabled, we use the original SymlinkTextInputFormat
+      // which has the empty splits issue and therefore the result should be empty.
+      withSQLConf(USE_DELEGATE_FOR_SYMLINK_TEXT_INPUT_FORMAT.key -> "false") {
Review Comment:
Thank you for adding this.
Shall we additionally add a `spark.hadoopRDD.ignoreEmptySplits=true` condition to make the test's intent more explicit?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]