dongjoon-hyun commented on code in PR #38084:
URL: https://github.com/apache/spark/pull/38084#discussion_r989715672
##########
core/src/test/scala/org/apache/spark/deploy/SparkHadoopUtilSuite.scala:
##########
@@ -80,6 +89,43 @@ class SparkHadoopUtilSuite extends SparkFunSuite {
assertConfigValue(hadoopConf, "fs.s3a.endpoint", null)
}
+ /**
+ * spark.hive.* is passed to the hadoop config as hive.*.
+ */
+ test("spark.hive propagation") {
+ val sc = new SparkConf()
+ val hadoopConf = new Configuration(false)
+ sc.set("spark.hive.hiveoption", "value")
+ new SparkHadoopUtil().appendS3AndSparkHadoopHiveConfigurations(sc,
hadoopConf)
+ // the spark.hive option must have been propagated as hive.hiveoption
+ assertConfigMatches(hadoopConf, "hive.hiveoption", "value",
hivePropagation)
+ }
+
+ /**
+ * The explicit buffer size propagation records this.
+ */
+ test("buffer size propagation") {
+ val sc = new SparkConf()
+ val hadoopConf = new Configuration(false)
+ sc.set(BUFFER_SIZE.key, "123")
+ new SparkHadoopUtil().appendS3AndSparkHadoopHiveConfigurations(sc,
hadoopConf)
+ // the buffer size must have been propagated to io.file.buffer.size
+ assertConfigMatches(hadoopConf, "io.file.buffer.size", "123",
BUFFER_SIZE.key)
+ }
+
+ test("aws credentials from environment variables") {
Review Comment:
```scala
- test("aws credentials from environment variables") {
+ test("SPARK-40640: aws credentials from environment variables") {
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]