viirya commented on code in PR #43205:
URL: https://github.com/apache/spark/pull/43205#discussion_r1346159087
##########
core/src/test/scala/org/apache/spark/deploy/SparkHadoopUtilSuite.scala:
##########
@@ -110,16 +110,18 @@ class SparkHadoopUtilSuite extends SparkFunSuite {
test("SPARK-40640: aws credentials from environment variables") {
val hadoopConf = new Configuration(false)
SparkHadoopUtil.appendS3CredentialsFromEnvironment(hadoopConf,
- "access-key", "secret-key", "session-token")
+ "endpoint", "access-key", "secret-key", "session-token")
val source = "Set by Spark on " + InetAddress.getLocalHost + " from "
+ assertConfigMatches(hadoopConf, "fs.s3a.endpoint", "endpoint", source)
assertConfigMatches(hadoopConf, "fs.s3a.access.key", "access-key", source)
assertConfigMatches(hadoopConf, "fs.s3a.secret.key", "secret-key", source)
assertConfigMatches(hadoopConf, "fs.s3a.session.token", "session-token",
source)
}
test("SPARK-19739: S3 session token propagation requires access and secret
keys") {
val hadoopConf = new Configuration(false)
- SparkHadoopUtil.appendS3CredentialsFromEnvironment(hadoopConf, null, null,
"session-token")
+ SparkHadoopUtil.appendS3CredentialsFromEnvironment(
+ hadoopConf, null, null, null, "session-token")
assertConfigValue(hadoopConf, "fs.s3a.session.token", null)
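
For readers without the full PR in front of them: below is a minimal, self-contained sketch of how environment-derived S3 settings could be appended to the Hadoop configuration keys asserted above. The object and helper names (`S3EndpointSketch`, `setIfNonNull`) are hypothetical, not Spark's actual private code; the session-token guard that the SPARK-19739 test exercises is sketched after the review comment below.

```scala
import java.net.InetAddress

import org.apache.hadoop.conf.Configuration

object S3EndpointSketch {
  // Hypothetical analogue of appendS3CredentialsFromEnvironment: copy each
  // non-null value into the corresponding fs.s3a.* key, tagging it with a
  // "Set by Spark on <host> from ..." source string like the one the test
  // matches against.
  def appendS3Credentials(
      hadoopConf: Configuration,
      endpoint: String,
      accessKey: String,
      secretKey: String,
      sessionToken: String): Unit = {
    val source = "Set by Spark on " + InetAddress.getLocalHost + " from env"
    setIfNonNull(hadoopConf, "fs.s3a.endpoint", endpoint, source)
    setIfNonNull(hadoopConf, "fs.s3a.access.key", accessKey, source)
    setIfNonNull(hadoopConf, "fs.s3a.secret.key", secretKey, source)
    setIfNonNull(hadoopConf, "fs.s3a.session.token", sessionToken, source)
  }

  // Hypothetical helper. Configuration.set(name, value, source) is the real
  // Hadoop API; skipping null values leaves unset entries at their default,
  // which is what assertConfigValue(..., null) relies on.
  private def setIfNonNull(
      conf: Configuration, key: String, value: String, source: String): Unit = {
    if (value != null) {
      conf.set(key, value, source)
    }
  }
}
```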
Review Comment:
I think it tests the default value. I'm just wondering why it's described as
testing the access/secret key requirement, given that both are null here.
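
On the question above: if the propagation logic gates the session token on both keys being present, then calling it with null keys leaves `fs.s3a.session.token` unset, which is what the `assertConfigValue(..., null)` line checks. Here is a minimal sketch of that guard, assuming the behavior the SPARK-19739 test title describes; the names are illustrative, not Spark's actual code.

```scala
import org.apache.hadoop.conf.Configuration

object SessionTokenGuardSketch {
  // Propagate a session token only when the access and secret keys are both
  // present: a token without its key pair is unusable for s3a.
  def propagateSessionToken(
      conf: Configuration,
      accessKey: String,
      secretKey: String,
      sessionToken: String): Unit = {
    if (accessKey != null && secretKey != null && sessionToken != null) {
      conf.set("fs.s3a.session.token", sessionToken)
    }
    // With accessKey == null or secretKey == null nothing is set, so
    // conf.get("fs.s3a.session.token") stays null, the default the test
    // asserts via assertConfigValue(hadoopConf, "fs.s3a.session.token", null).
  }
}
```

Under that reading, passing `null, null, null, "session-token"` exercises the negative path of the guard rather than a plain default, which may be what the test title is getting at.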