Repository: spark
Updated Branches:
  refs/heads/branch-2.0 99943bf69 -> 3796a98cf


[SPARK-17711][TEST-HADOOP2.2] Fix hadoop2.2 compilation error

## What changes were proposed in this pull request?

Fix the hadoop2.2 compilation error by replacing the `org.apache.commons.io.IOUtils.read` call in `Utils.scala` with Guava's `ByteStreams.read`, since the older commons-io version pulled in under the hadoop-2.2 profile does not provide that method.

## How was this patch tested?

Existing tests.

cc tdas zsxwing

Author: Yu Peng <loneknigh...@gmail.com>

Closes #15537 from loneknightpy/fix-17711.

(cherry picked from commit 2629cd74602cfe77188b76428fed62a7a7149315)
Signed-off-by: Shixiong Zhu <shixi...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3796a98c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3796a98c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3796a98c

Branch: refs/heads/branch-2.0
Commit: 3796a98cf3efad1dcbef536b295c7c47bf47d5dd
Parents: 99943bf
Author: Yu Peng <loneknigh...@gmail.com>
Authored: Tue Oct 18 19:43:08 2016 -0700
Committer: Shixiong Zhu <shixi...@databricks.com>
Committed: Tue Oct 18 19:43:17 2016 -0700

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/util/Utils.scala | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/3796a98c/core/src/main/scala/org/apache/spark/util/Utils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index a8532b2..3d862f4 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -42,7 +42,6 @@ import scala.util.control.{ControlThrowable, NonFatal}
 import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
 import com.google.common.io.{ByteStreams, Files => GFiles}
 import com.google.common.net.InetAddresses
-import org.apache.commons.io.IOUtils
 import org.apache.commons.lang3.SystemUtils
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
@@ -1494,10 +1493,10 @@ private[spark] object Utils extends Logging {
       val gzInputStream = new GZIPInputStream(new FileInputStream(file))
       val bufSize = 1024
       val buf = new Array[Byte](bufSize)
-      var numBytes = IOUtils.read(gzInputStream, buf)
+      var numBytes = ByteStreams.read(gzInputStream, buf, 0, bufSize)
       while (numBytes > 0) {
         fileSize += numBytes
-        numBytes = IOUtils.read(gzInputStream, buf)
+        numBytes = ByteStreams.read(gzInputStream, buf, 0, bufSize)
       }
       fileSize
     } catch {

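For reference, below is a minimal standalone sketch of the same pattern the patch switches to: Guava's `ByteStreams.read(in, buf, off, len)` blocks until `len` bytes have been read or end of stream is reached and returns the number of bytes actually read, so a return value of 0 signals EOF. This is not the Spark code itself; the object name and the file path in `main` are hypothetical and used only for illustration.

    import java.io.{FileInputStream, InputStream}
    import java.util.zip.GZIPInputStream

    import com.google.common.io.ByteStreams

    object GzipSizeExample {
      // Mirrors the loop in the patched Utils.scala: call ByteStreams.read
      // repeatedly, accumulating the byte counts it returns, until it
      // returns 0, which indicates the end of the decompressed stream.
      def uncompressedSize(path: String): Long = {
        val in: InputStream = new GZIPInputStream(new FileInputStream(path))
        try {
          val bufSize = 1024
          val buf = new Array[Byte](bufSize)
          var fileSize = 0L
          var numBytes = ByteStreams.read(in, buf, 0, bufSize)
          while (numBytes > 0) {
            fileSize += numBytes
            numBytes = ByteStreams.read(in, buf, 0, bufSize)
          }
          fileSize
        } finally {
          in.close()
        }
      }

      def main(args: Array[String]): Unit = {
        // Hypothetical input file, for illustration only.
        println(uncompressedSize("/tmp/example.log.gz"))
      }
    }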
