[ https://issues.apache.org/jira/browse/SPARK-18181?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Imran Rashid closed SPARK-18181.
--------------------------------
    Resolution: Information Provided

> Huge managed memory leak (2.7G) when running reduceByKey
> --------------------------------------------------------
>
>                 Key: SPARK-18181
>                 URL: https://issues.apache.org/jira/browse/SPARK-18181
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>    Affects Versions: 1.6.2
>            Reporter: Barry Becker
>
> For a while now, I have noticed messages like
> 16/10/31 09:44:25 ERROR Executor: Managed memory leak detected; size = 5251642 bytes, TID = 64204
> when running jobs with Spark 1.6.2.
> I have seen others file bugs on this, but they are all marked as fixed in earlier versions. I am certain that the issue still exists in 1.6.2.
> In the following case, I can even get it to leak 2.7G all at once. The message is:
> 16/10/31 11:12:47 ERROR Executor: Managed memory leak detected; size = 2724723111 bytes, TID = 18
> The code snippet causing it is:
> {code}
> val nonZeros: RDD[((Int, Float), Array[Long])] =
>   featureValues.map(y => (y._1._1 + "," + y._1._2, y._2)).reduceByKey {
>     case (v1, v2) => (v1, v2).zipped.map(_ + _)
>   }.map(y => {
>     val s = y._1.split(",")
>     ((s(0).toInt, s(1).toFloat), y._2)
>   })
> {code}
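> For reference, here is a minimal sketch (editor's addition, not from the reporter) of the same reduction keyed directly on the (Int, Float) tuple. Spark can shuffle on tuple keys, so the encode-to-String / split-back steps above are only one way to form the composite key; this variant is not claimed to avoid the reported leak.
> {code}
> import org.apache.spark.rdd.RDD
>
> // Sketch only: assumes featureValues: RDD[((Int, Float), Array[Long])], as implied by
> // the snippet above. The element-wise sum of the value arrays is unchanged.
> def sumByFeature(featureValues: RDD[((Int, Float), Array[Long])]): RDD[((Int, Float), Array[Long])] =
>   featureValues.reduceByKey { (v1, v2) =>
>     (v1, v2).zipped.map(_ + _)   // element-wise sum; allocates a new Array[Long] per merge
>   }
> {code}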
> The stack trace is:
> {code}
> 16/10/31 11:12:47 ERROR Executor: Exception in task 0.0 in stage 11.0 (TID 18)
> java.lang.OutOfMemoryError: Java heap space
>       at scala.collection.mutable.ArrayBuilder$ofLong.mkArray(ArrayBuilder.scala:388)
>       at scala.collection.mutable.ArrayBuilder$ofLong.resize(ArrayBuilder.scala:394)
>       at scala.collection.mutable.ArrayBuilder$ofLong.sizeHint(ArrayBuilder.scala:399)
>       at scala.collection.mutable.Builder$class.sizeHint(Builder.scala:69)
>       at scala.collection.mutable.ArrayBuilder.sizeHint(ArrayBuilder.scala:22)
>       at scala.runtime.Tuple2Zipped$.map$extension(Tuple2Zipped.scala:41)
>       at org.apache.spark.mllib.feature.MDLPDiscretizer$$anonfun$11.apply(MDLPDiscretizer.scala:151)
>       at org.apache.spark.mllib.feature.MDLPDiscretizer$$anonfun$11.apply(MDLPDiscretizer.scala:150)
>       at org.apache.spark.util.collection.ExternalSorter$$anonfun$5.apply(ExternalSorter.scala:187)
>       at org.apache.spark.util.collection.ExternalSorter$$anonfun$5.apply(ExternalSorter.scala:186)
>       at org.apache.spark.util.collection.AppendOnlyMap.changeValue(AppendOnlyMap.scala:144)
>       at org.apache.spark.util.collection.SizeTrackingAppendOnlyMap.changeValue(SizeTrackingAppendOnlyMap.scala:32)
>       at org.apache.spark.util.collection.ExternalSorter.insertAll(ExternalSorter.scala:192)
>       at org.apache.spark.shuffle.sort.SortShuffleWriter.write(SortShuffleWriter.scala:64)
>       at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:73)
>       at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
>       at org.apache.spark.scheduler.Task.run(Task.scala:89)
>       at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:227)
>       at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>       at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>       at java.lang.Thread.run(Thread.java:745)
> 16/10/31 11:12:47 ERROR SparkUncaughtExceptionHandler: Uncaught exception in thread Thread[Executor task launch worker-0,5,main]
> java.lang.OutOfMemoryError: Java heap space
>       at scala.collection.mutable.ArrayBuilder$ofLong.mkArray(ArrayBuilder.scala:388)
>       at scala.collection.mutable.ArrayBuilder$ofLong.resize(ArrayBuilder.scala:394)
>       at scala.collection.mutable.ArrayBuilder$ofLong.sizeHint(ArrayBuilder.scala:399)
>       at scala.collection.mutable.Builder$class.sizeHint(Builder.scala:69)
>       at scala.collection.mutable.ArrayBuilder.sizeHint(ArrayBuilder.scala:22)
>       at scala.runtime.Tuple2Zipped$.map$extension(Tuple2Zipped.scala:41)
>       at org.apache.spark.mllib.feature.MDLPDiscretizer$$anonfun$11.apply(MDLPDiscretizer.scala:151)
>       at org.apache.spark.mllib.feature.MDLPDiscretizer$$anonfun$11.apply(MDLPDiscretizer.scala:150)
>       at org.apache.spark.util.collection.ExternalSorter$$anonfun$5.apply(ExternalSorter.scala:187)
>       at org.apache.spark.util.collection.ExternalSorter$$anonfun$5.apply(ExternalSorter.scala:186)
>       at org.apache.spark.util.collection.AppendOnlyMap.changeValue(AppendOnlyMap.scala:144)
>       at org.apache.spark.util.collection.SizeTrackingAppendOnlyMap.changeValue(SizeTrackingAppendOnlyMap.scala:32)
>       at org.apache.spark.util.collection.ExternalSorter.insertAll(ExternalSorter.scala:192)
>       at org.apache.spark.shuffle.sort.SortShuffleWriter.write(SortShuffleWriter.scala:64)
>       at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:73)
>       at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
>       at org.apache.spark.scheduler.Task.run(Task.scala:89)
>       at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:227)
>       at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>       at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>       at java.lang.Thread.run(Thread.java:745)
> 16/10/31 11:12:47 WARN TaskSetManager: Lost task 0.0 in stage 11.0 (TID 18, localhost): java.lang.OutOfMemoryError: Java heap space
>       at scala.collection.mutable.ArrayBuilder$ofLong.mkArray(ArrayBuilder.scala:388)
>       at scala.collection.mutable.ArrayBuilder$ofLong.resize(ArrayBuilder.scala:394)
>       at scala.collection.mutable.ArrayBuilder$ofLong.sizeHint(ArrayBuilder.scala:399)
>       at scala.collection.mutable.Builder$class.sizeHint(Builder.scala:69)
>       at scala.collection.mutable.ArrayBuilder.sizeHint(ArrayBuilder.scala:22)
>       at scala.runtime.Tuple2Zipped$.map$extension(Tuple2Zipped.scala:41)
>       at org.apache.spark.mllib.feature.MDLPDiscretizer$$anonfun$11.apply(MDLPDiscretizer.scala:151)
>       at org.apache.spark.mllib.feature.MDLPDiscretizer$$anonfun$11.apply(MDLPDiscretizer.scala:150)
>       at org.apache.spark.util.collection.ExternalSorter$$anonfun$5.apply(ExternalSorter.scala:187)
>       at org.apache.spark.util.collection.ExternalSorter$$anonfun$5.apply(ExternalSorter.scala:186)
>       at org.apache.spark.util.collection.AppendOnlyMap.changeValue(AppendOnlyMap.scala:144)
>       at org.apache.spark.util.collection.SizeTrackingAppendOnlyMap.changeValue(SizeTrackingAppendOnlyMap.scala:32)
>       at org.apache.spark.util.collection.ExternalSorter.insertAll(ExternalSorter.scala:192)
>       at org.apache.spark.shuffle.sort.SortShuffleWriter.write(SortShuffleWriter.scala:64)
>       at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:73)
>       at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
>       at org.apache.spark.scheduler.Task.run(Task.scala:89)
> {code}
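> For context, a small standalone sketch (editor's addition, not from the report) of what the Tuple2Zipped.map / ArrayBuilder$ofLong frames in the trace correspond to: each per-key merge builds a brand-new Array[Long] of the same length as the inputs, which is where the heap allocation in the failing task occurs.
> {code}
> // Illustration only: element-wise sum of two Array[Long] values via Tuple2Zipped.map
> val v1 = Array(1L, 2L, 3L)
> val v2 = Array(10L, 20L, 30L)
> val summed: Array[Long] = (v1, v2).zipped.map(_ + _)   // fresh array: Array(11L, 22L, 33L)
> {code}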
> If you want to run the example, you can check out this branch of the MDLP project
> https://github.com/barrybecker4/spark-MDLP-discretization/tree/ISSUE-14-performance
> go to commit cd9c797, and run the MDLPDiscretizerHugeSuite.


