HyukjinKwon commented on a change in pull request #30973:
URL: https://github.com/apache/spark/pull/30973#discussion_r550949666



##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/BaseScriptTransformationExec.scala
##########
@@ -69,9 +75,19 @@ trait BaseScriptTransformationExec extends UnaryExecNode {
     }
   }
 
-  protected def initProc: (OutputStream, Process, InputStream, CircularBuffer) = {
-    val cmd = List("/bin/bash", "-c", script)
+  protected def initProc(hadoopConf: Configuration): ProcParameters = {
+    val wrapper = splitArgs(hadoopConf.get(SQLConf.SCRIPT_TRANSFORMATION_COMMAND_WRAPPER.key))
+    val cmdArgs = splitArgs(script)
+    val prog = cmdArgs(0)
+    if (!new File(prog).isAbsolute) {
+      val progFile = new File(SparkFiles.get(prog))
+      if (progFile.exists()) {
+        cmdArgs(0) = progFile.getAbsolutePath
+      }
+    }
+    val cmd = wrapper.toList ++ cmdArgs.toList
     val builder = new ProcessBuilder(cmd.asJava)
+      .directory(new File(SparkFiles.getRootDirectory()))

Review comment:
       That seems to be because we're running with a `local` cluster. If you run the tests with `local-cluster`, I guess it'd work. But okay, this way we can cover `local` too.
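       A rough, hypothetical sketch of driving such a test through a `local-cluster` master, so that `SparkFiles.get` has to resolve the shipped script from an executor-side working directory. The script name, path, and worker sizes below are illustrative placeholders, and it assumes a built Spark environment in which the `local-cluster` master can launch worker JVMs:

```scala
import org.apache.spark.sql.SparkSession

object LocalClusterTransformCheck {
  def main(args: Array[String]): Unit = {
    // Launch against a local-cluster master: 2 workers, 1 core and 1024 MB each.
    val spark = SparkSession.builder()
      .master("local-cluster[2, 1, 1024]")
      .appName("ScriptTransformationLocalClusterCheck")
      .getOrCreate()

    // Ship the transform script to the executors; SparkFiles.get("test_script.py")
    // should then resolve to the executor-local copy when the command is built.
    spark.sparkContext.addFile("/path/to/test_script.py")

    spark.range(5).createOrReplaceTempView("t")
    spark.sql(
      "SELECT TRANSFORM(id) USING 'test_script.py' AS (out STRING) FROM t"
    ).show()

    spark.stop()
  }
}
```

       In `local` mode the driver and executors share the same JVM and SparkFiles root directory, which is presumably why the relative-path lookup also works there.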



