seriously6 commented on a change in pull request #35278:
URL: https://github.com/apache/spark/pull/35278#discussion_r790283436
##########
File path: core/src/main/scala/org/apache/spark/util/Utils.scala
##########
@@ -605,6 +603,29 @@ private[spark] object Utils extends Logging {
}
}
+ def unZip(inFile: File, unzipDir: File): Unit = {
+ if (!unzipDir.mkdirs && !unzipDir.isDirectory) {
+ throw new IOException("Mkdirs failed to create " + unzipDir)
+ } else {
+ if (Shell.WINDOWS) {
+ FileUtil.unZip(inFile, unzipDir)
+ } else {
+ val untarCommand = new StringBuilder
+ untarCommand.append(s"mkdir $unzipDir && unzip -d ")
+ untarCommand.append(FileUtil.makeSecureShellPath(unzipDir))
+ untarCommand.append(" -o ")
+ untarCommand.append(FileUtil.makeSecureShellPath(inFile))
+ val shellCmd = Array("bash", "-c", untarCommand.toString)
+ val shexec = new ShellCommandExecutor(shellCmd)
+ shexec.execute()
+ val exitcode = shexec.getExitCode
+ if (exitcode != 0) {
+ throw new IOException("Error untarring file " + inFile + ". Tar process exited with exit code " + exitcode)
Review comment:
- "untarring"/"Tar" should be "unzipping"/"unzip" here.
- Just a suggestion: it would be better to include the shell's errorMsg in the exception (rough sketch below).
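A minimal sketch of what I mean, assuming Hadoop's `Shell.ExitCodeException` carries the command's stderr as its message (illustrative only, not the exact wording I'd require):

```scala
// sketch only: surface the shell's error message instead of just the exit code
try {
  shexec.execute()
} catch {
  case e: Shell.ExitCodeException =>
    // ExitCodeException's message holds the stderr captured by ShellCommandExecutor
    throw new IOException(
      s"Error unzipping file $inFile. unzip exited with code ${e.getExitCode}: ${e.getMessage}", e)
}
```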
##########
File path: core/src/main/scala/org/apache/spark/util/Utils.scala
##########
@@ -3196,8 +3217,8 @@ private[spark] object Utils extends Logging {
entry = in.getNextEntry()
}
in.close() // so that any error in closing does not get ignored
- logInfo(s"Unzipped from $dfsZipFile\n\t${files.mkString("\n\t")}")
} finally {
+ logInfo(s"Unzipped from $dfsZipFile\n\t${files.mkString("\n\t")}")
Review comment:
nit
##########
File path: core/src/main/scala/org/apache/spark/util/Utils.scala
##########
@@ -605,6 +603,29 @@ private[spark] object Utils extends Logging {
}
}
+ def unZip(inFile: File, unzipDir: File): Unit = {
+ if (!unzipDir.mkdirs && !unzipDir.isDirectory) {
+ throw new IOException("Mkdirs failed to create " + unzipDir)
+ } else {
+ if (Shell.WINDOWS) {
+ FileUtil.unZip(inFile, unzipDir)
+ } else {
+ val untarCommand = new StringBuilder
+ untarCommand.append(s"mkdir $unzipDir && unzip -d ")
Review comment:
Why do we need to mkdir the dest directory again? It was already created by `unzipDir.mkdirs` above.
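For illustration, a sketch of the shell command without the extra mkdir, assuming the `unzipDir.mkdirs` call above has already created the directory (just a sketch, not a required change):

```scala
// sketch: rely on the unzipDir.mkdirs call above and only run unzip here
val unzipCommand = new StringBuilder
unzipCommand.append("unzip -d ")
unzipCommand.append(FileUtil.makeSecureShellPath(unzipDir))
unzipCommand.append(" -o ")
unzipCommand.append(FileUtil.makeSecureShellPath(inFile))
val shellCmd = Array("bash", "-c", unzipCommand.toString)
```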
##########
File path: core/src/main/scala/org/apache/spark/util/Utils.scala
##########
@@ -42,7 +41,6 @@ import scala.reflect.ClassTag
import scala.util.{Failure, Success, Try}
import scala.util.control.{ControlThrowable, NonFatal}
import scala.util.matching.Regex
-
Review comment:
Is removing this blank line related to this change?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]