shrutisinghania commented on code in PR #52027:
URL: https://github.com/apache/spark/pull/52027#discussion_r2538083433
##########
core/src/test/scala/org/apache/spark/deploy/SparkHadoopUtilSuite.scala:
##########
@@ -52,6 +55,49 @@ class SparkHadoopUtilSuite extends SparkFunSuite {
     assertConfigValue(hadoopConf, "fs.s3a.downgrade.syncable.exceptions",
       "false")
   }
+  /**
+   * Verify that the GCS user agent is set correctly when no custom suffix is provided.
+   */
+  test("SPARK-52336: GCS user agent should be set when not provided by user") {
+    assume(gcsConnectorAvailable, s"GCS connector '$gcsConnectorClassName' not available.")
+    val sparkConf = new SparkConf()
+    val hadoopConf = SparkHadoopUtil.newConfiguration(sparkConf)
+
+    val expectedUserAgent = s"apache-spark/${org.apache.spark.SPARK_VERSION} (GPN:apache-spark)"
+    assertConfigMatches(hadoopConf, "fs.gs.application.name.suffix", expectedUserAgent,
+      SOURCE_SPARK)
+  }
+
+  /**
+   * Verify that the Spark identifier is prepended to a user-provided GCS user agent suffix.
+   */
+  test("SPARK-52336: GCS user agent should be prepended when suffix is provided by user") {
Review Comment:
We are not overwriting the user's custom suffix, but rather enhancing it by
prepending a standard Spark identifier in an idempotent way. This gives us
standardized observability across all Spark jobs on GCS while preserving the
user's custom configuration.
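
For illustration, here is a minimal sketch of the idempotent-prepend behavior described above. The object, constant, and method names (`GcsUserAgentSketch`, `sparkUserAgent`, `appendSparkUserAgent`) and the hardcoded version string are assumptions for this sketch, not the names used in the PR itself.

```scala
// Minimal sketch (not the PR's actual implementation) of idempotently prepending
// the Spark identifier to the user's fs.gs.application.name.suffix value.
object GcsUserAgentSketch {

  // Format asserted in the tests above; the version is hardcoded here for the sketch.
  val sparkUserAgent: String = "apache-spark/4.1.0 (GPN:apache-spark)"

  /**
   * Returns the Spark identifier alone when the user configured no suffix; otherwise
   * returns the identifier followed by the user's suffix. Re-applying the function to
   * an already-prefixed value leaves it unchanged, so the operation is idempotent.
   */
  def appendSparkUserAgent(userSuffix: Option[String]): String = userSuffix match {
    case Some(s) if s.startsWith(sparkUserAgent) => s
    case Some(s) if s.trim.nonEmpty => s"$sparkUserAgent $s"
    case _ => sparkUserAgent
  }

  def main(args: Array[String]): Unit = {
    // No user suffix: identifier only.
    println(appendSparkUserAgent(None))
    // User suffix: identifier is prepended, suffix preserved.
    println(appendSparkUserAgent(Some("my-etl-job")))
    // Applying twice equals applying once (idempotent).
    val once = appendSparkUserAgent(Some("my-etl-job"))
    assert(appendSparkUserAgent(Some(once)) == once)
  }
}
```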
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]