vanzin commented on a change in pull request #23260: [SPARK-26311][CORE] New 
feature: apply custom log URL pattern for executor log URLs in SHS
URL: https://github.com/apache/spark/pull/23260#discussion_r250832154
 
 

 ##########
 File path: 
core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
 ##########
 @@ -291,6 +292,156 @@ class FsHistoryProviderSuite extends SparkFunSuite with 
BeforeAndAfter with Matc
     }
   }
 
+  test("log urls without customization") {
+    val conf = createTestConf()
+    val appId = "app1"
+    val user = "user1"
+
+    val executorInfos = (1 to 5).map(createTestExecutorInfo(appId, user, _))
+
+    val expected: Map[ExecutorInfo, Map[String, String]] = executorInfos.map { 
execInfo =>
+      execInfo -> execInfo.logUrlMap
+    }.toMap
+
+    testHandlingExecutorLogUrl(conf, expected)
+  }
+
+  test("custom log urls, including FILE_NAME") {
+    val conf = createTestConf()
+      .set(CUSTOM_EXECUTOR_LOG_URL, 
"http://newhost:9999/logs/clusters/{{CLUSTER_ID}}"; +
+        "/users/{{USER}}/containers/{{CONTAINER_ID}}/{{FILE_NAME}}")
+
+    // some of available attributes are not used in pattern which should be OK
+
+    val appId = "app1"
+    val user = "user1"
+
+    val executorInfos = (1 to 5).map(createTestExecutorInfo(appId, user, _))
+
+    val expected: Map[ExecutorInfo, Map[String, String]] = executorInfos.map { 
execInfo =>
+      val attr = execInfo.attributes
+      val newLogUrlMap = attr("LOG_FILES").split(",").map { file =>
+        val newLogUrl = 
s"http://newhost:9999/logs/clusters/${attr("CLUSTER_ID")}" +
+          s"/users/${attr("USER")}/containers/${attr("CONTAINER_ID")}/$file"
+        file -> newLogUrl
+      }.toMap
+
+      execInfo -> newLogUrlMap
+    }.toMap
+
+    testHandlingExecutorLogUrl(conf, expected)
+  }
+
+  test("custom log urls, excluding FILE_NAME") {
+    val conf = createTestConf()
+      .set(CUSTOM_EXECUTOR_LOG_URL, 
"http://newhost:9999/logs/clusters/{{CLUSTER_ID}}"; +
+        "/users/{{USER}}/containers/{{CONTAINER_ID}}")
+
+    // some of available attributes are not used in pattern which should be OK
+
+    val appId = "app1"
+    val user = "user1"
+
+    val executorInfos = (1 to 5).map(createTestExecutorInfo(appId, user, _))
+
+    val expected: Map[ExecutorInfo, Map[String, String]] = executorInfos.map { 
execInfo =>
+      val attr = execInfo.attributes
+      val newLogUrl = 
s"http://newhost:9999/logs/clusters/${attr("CLUSTER_ID")}" +
+        s"/users/${attr("USER")}/containers/${attr("CONTAINER_ID")}"
+
+      execInfo -> Map("log" -> newLogUrl)
+    }.toMap
+
+    testHandlingExecutorLogUrl(conf, expected)
+  }
+
+  test("custom log urls with invalid attribute") {
+    // Here we refer to {{NON_EXISTING}}, which is not available in the
+    // attributes, so Spark will fall back to providing the original log URL
+    // and log a warning.
+
+    val conf = createTestConf()
+      .set(CUSTOM_EXECUTOR_LOG_URL, 
"http://newhost:9999/logs/clusters/{{CLUSTER_ID}}"; +
+        
"/users/{{USER}}/containers/{{CONTAINER_ID}}/{{NON_EXISTING}}/{{FILE_NAME}}")
+
+    val appId = "app1"
 
 Review comment:
   Only used in one place, just inline the value. (Same below and in other 
tests.)

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to