From the Spark source: this is the class org.apache.spark.deploy.worker.WorkerArguments.

/** Default number of worker cores: one per processor available to the JVM. */
def inferDefaultCores(): Int =
  Runtime.getRuntime.availableProcessors()

/**
 * Infer the default amount of memory (in MB) a worker should use.
 *
 * Queries the OS MX bean for total physical memory, using reflection because
 * the concrete bean interface differs between the IBM JVM
 * (`com.ibm.lang.management.OperatingSystemMXBean.getTotalPhysicalMemory`) and
 * HotSpot-style JVMs
 * (`com.sun.management.OperatingSystemMXBean.getTotalPhysicalMemorySize`).
 * Falls back to 2 GB if the lookup fails for any reason.
 *
 * @return total physical memory minus 1 GB reserved for the OS, but never
 *         less than 512 MB
 */
def inferDefaultMemory(): Int = {
  val ibmVendor = System.getProperty("java.vendor").contains("IBM")
  // Expression-valued try: yields the detected memory in MB, or the 2 GB
  // fallback if reflection/bean access fails.
  val totalMb =
    try {
      val bean = ManagementFactory.getOperatingSystemMXBean()
      // Pick the vendor-specific bean interface and accessor name.
      val (className, methodName) =
        if (ibmVendor) {
          ("com.ibm.lang.management.OperatingSystemMXBean", "getTotalPhysicalMemory")
        } else {
          ("com.sun.management.OperatingSystemMXBean", "getTotalPhysicalMemorySize")
        }
      val method = Class.forName(className).getDeclaredMethod(methodName)
      // Both accessors return total physical memory in bytes; convert to MB.
      (method.invoke(bean).asInstanceOf[Long] / 1024 / 1024).toInt
    } catch {
      // Exception (not Throwable) so fatal errors like OOM still propagate.
      case e: Exception =>
        val fallbackMb = 2 * 1024
        System.out.println(s"Failed to get total physical memory. Using $fallbackMb MB")
        fallbackMb
    }
  // Leave out 1 GB for the operating system, but don't return a negative memory size
  math.max(totalMb - 1024, 512)
}




------------------ Original ------------------
From:  "swaranga";<sarma.swara...@gmail.com>;
Date:  Fri, May 22, 2015 03:31 PM
To:  "user"<user@spark.apache.org>; 

Subject:  Spark Memory management



Experts,

This is an academic question. Since Spark runs on the JVM, how is it able to
do things like offloading RDDs from memory to disk when the data cannot fit
into memory. How are the calculations performed? Does it use the methods
available in the java.lang.Runtime class to get free/available memory? How
accurate are these calculations?

Thanks for any inputs.



--
View this message in context: 
http://apache-spark-user-list.1001560.n3.nabble.com/Spark-Memory-management-tp22992.html
Sent from the Apache Spark User List mailing list archive at Nabble.com.

---------------------------------------------------------------------
To unsubscribe, e-mail: user-unsubscr...@spark.apache.org
For additional commands, e-mail: user-h...@spark.apache.org

Reply via email to