Repository: spark
Updated Branches:
  refs/heads/master 5b754b45f -> e508f599f


[SPARK-2108] Mark SparkContext methods that return block information as
developer APIs

Author: Prashant Sharma <prashan...@imaginea.com>

Closes #1047 from ScrapCodes/SPARK-2108/mark-as-dev-api and squashes the 
following commits:

073ee34 [Prashant Sharma] [SPARK-2108] Mark SparkContext methods that return 
block information as developer API's


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e508f599
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e508f599
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e508f599

Branch: refs/heads/master
Commit: e508f599f88baaa31a3498fb0bdbafdbc303119e
Parents: 5b754b4
Author: Prashant Sharma <prashan...@imaginea.com>
Authored: Wed Jun 11 10:49:34 2014 -0700
Committer: Patrick Wendell <pwend...@gmail.com>
Committed: Wed Jun 11 10:49:34 2014 -0700

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkContext.scala | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/e508f599/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala 
b/core/src/main/scala/org/apache/spark/SparkContext.scala
index d721aba..8bdaf0b 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -823,9 +823,11 @@ class SparkContext(config: SparkConf) extends Logging {
   }
 
   /**
+   * :: DeveloperApi ::
    * Return information about what RDDs are cached, if they are in mem or on 
disk, how much space
    * they take, etc.
    */
+  @DeveloperApi
   def getRDDStorageInfo: Array[RDDInfo] = {
     StorageUtils.rddInfoFromStorageStatus(getExecutorStorageStatus, this)
   }
@@ -837,8 +839,10 @@ class SparkContext(config: SparkConf) extends Logging {
   def getPersistentRDDs: Map[Int, RDD[_]] = persistentRdds.toMap
 
   /**
+   * :: DeveloperApi ::
    * Return information about blocks stored in all of the slaves
    */
+  @DeveloperApi
   def getExecutorStorageStatus: Array[StorageStatus] = {
     env.blockManager.master.getStorageStatus
   }

Reply via email to