mridulm commented on code in PR #38377:
URL: https://github.com/apache/spark/pull/38377#discussion_r1003551847


##########
core/src/test/scala/org/apache/spark/util/logging/DriverLoggerSuite.scala:
##########
@@ -59,15 +60,17 @@ class DriverLoggerSuite extends SparkFunSuite with LocalSparkContext {
 
     sc.stop()
     assert(!driverLogsDir.exists())
-    val dfsFile = FileUtils.getFile(sc.getConf.get(DRIVER_LOG_DFS_DIR).get,
-      app_id + DriverLogger.DRIVER_LOG_FILE_SUFFIX)
-    assert(dfsFile.exists())
-    assert(dfsFile.length() > 0)
+    val dfsFile = new Path(sc.getConf.get(DRIVER_LOG_DFS_DIR).get +
+      "/" + app_id + DriverLogger.DRIVER_LOG_FILE_SUFFIX)
+    val dfsFileStatus = dfsFile.getFileSystem(sc.hadoopConfiguration).getFileStatus(dfsFile)
+
+    assert(dfsFileStatus.isFile)
+    assert(dfsFileStatus.getLen > 0)
   }
 
   private def getSparkContext(): SparkContext = {
     val conf = new SparkConf()
-    conf.set(DRIVER_LOG_DFS_DIR, rootDfsDir.getAbsolutePath())
+    conf.set(DRIVER_LOG_DFS_DIR, "file://" + rootDfsDir.getAbsolutePath())
     conf.set(DRIVER_LOG_PERSISTTODFS, true)
     conf.set(SparkLauncher.SPARK_MASTER, "local")

Review Comment:
   Can we add a test specifically for this, so that the existing tests continue to check the plain local path?
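   A rough sketch of what such a dedicated test could look like (the test name is made up, and it assumes the suite's existing `rootDfsDir`, the `sc` variable provided by `LocalSparkContext`, and the current imports; the existing test would keep passing the unqualified local path):

   ```scala
   test("driver logs are persisted when the DFS dir is given as a file:// URI") {
     val conf = new SparkConf()
       .set(DRIVER_LOG_DFS_DIR, "file://" + rootDfsDir.getAbsolutePath())
       .set(DRIVER_LOG_PERSISTTODFS, true)
       .set(SparkLauncher.SPARK_MASTER, "local")
     sc = new SparkContext("local", "DriverLogTest", conf)
     val appId = sc.applicationId
     // Run a trivial job so the driver emits some log output before stopping.
     sc.parallelize(1 to 10).count()
     sc.stop()

     // Resolve the persisted log through the Hadoop FileSystem API so that
     // scheme-qualified locations (file://, hdfs://, ...) are handled uniformly.
     val dfsFile = new Path(conf.get(DRIVER_LOG_DFS_DIR).get + "/" + appId +
       DriverLogger.DRIVER_LOG_FILE_SUFFIX)
     val status = dfsFile.getFileSystem(sc.hadoopConfiguration).getFileStatus(dfsFile)
     assert(status.isFile)
     assert(status.getLen > 0)
   }
   ```

   Keeping the current local-path test unchanged and adding something like the above alongside it would cover both forms of the config.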


