[01/51] [partial] spark-website git commit: Add 2.1.2 docs

2017-10-17 Thread holden
Repository: spark-website
Updated Branches:
  refs/heads/asf-site 0490125a8 -> a6155a89d


http://git-wip-us.apache.org/repos/asf/spark-website/blob/a6155a89/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html
--
diff --git a/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html b/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html
new file mode 100644
index 000..19fea40
--- /dev/null
+++ b/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html
@@ -0,0 +1,2158 @@
+http://www.w3.org/TR/html4/loose.dtd">
+
+
+
+
+RRDD (Spark 2.1.2 JavaDoc)
+
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.spark.api.r
+Class RRDD
+
+
+
+Object
+
+
+org.apache.spark.rdd.RDD
+
+
+org.apache.spark.api.r.BaseRRDD
+
+
+org.apache.spark.api.r.RRDD
+
+
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+java.io.Serializable
+
+
+
+public class RRDD<T>
+extends BaseRRDD<T,byte[]>
+An RDD that stores serialized R objects as Array[Byte].
+See Also: Serialized Form
+
+
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Constructor and Description
+
+
+RRDD(RDD<T> parent,
+byte[] func,
+String deserializer,
+String serializer,
+byte[] packageNames,
+Object[] broadcastVars,
+scala.reflect.ClassTag<T> evidence$4) 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+Methods 
+
+Modifier and Type
+Method and Description
+
+
+static RDD
+$plus$plus(RDD other) 
+
+
+static  U
+aggregate(U zeroValue,
+ scala.Function2 seqOp,
+ scala.Function2 combOp,
+ scala.reflect.ClassTag evidence$30) 
+
+
+JavaRDD
+asJavaRDD() 
+
+
+static RDD
+cache() 
+
+
+static  RDD>
+cartesian(RDD other,
+ scala.reflect.ClassTag evidence$5) 
+
+
+static void
+checkpoint() 
+
+
+static RDD
+coalesce(int numPartitions,
+boolean shuffle,
+scala.Option partitionCoalescer,
+scala.math.Ordering ord) 
+
+
+static boolean
+coalesce$default$2() 
+
+
+static scala.Option
+coalesce$default$3() 
+
+
+static scala.math.Ordering
+coalesce$default$4(int numPartitions,
+  boolean shuffle,
+  scala.Option partitionCoalescer) 
+
+
+static Object
+collect() 
+
+
+static  RDD
+collect(scala.PartialFunction f,
+   scala.reflect.ClassTag evidence$29) 
+
+
+static scala.collection.Iterator
+compute(Partition partition,
+   TaskContext context) 
+
+
+static SparkContext
+context() 
+
+
+static long
+count() 
+
+
+static PartialResult
+countApprox(long timeout,
+   double confidence) 
+
+
+static double
+countApprox$default$2() 
+
+
+static long
+countApproxDistinct(double relativeSD) 
+
+
+static long
+countApproxDistinct(int p,
+   int sp) 
+
+
+static double
+countApproxDistinct$default$1() 
+
+
+static scala.collection.Map
+countByValue(scala.math.Ordering ord) 
+
+
+static scala.math.Ordering
+countByValue$default$1() 
+
+
+static PartialResult>
+countByValueApprox(long timeout,
+  double confidence,
+  scala.math.Ordering ord) 
+
+
+static double
+countByValueApprox$default$2() 
+
+
+static scala.math.Ordering
+countByValueApprox$default$3(long timeout,
+double confidence) 
+
+
+static JavaRDD
+createRDDFromArray(JavaSparkContext jsc,
+  byte[][] arr)
+Create an RRDD given a sequence of byte arrays.
+
+
+
+static JavaRDD
+createRDDFromFile(JavaSparkContext jsc,
+ String fileName,
+ int parallelism)
+Create an RRDD given a temporary file name.
+
+
+
+static JavaSparkContext
+createSparkContext(String master,
+  String appName,
+  String sparkHome,
+  String[] jars,
+  java.util.Map sparkEnvirMap,
+  java.util.Map sparkExecutorEnvMap) 
+
+
+static scala.collection.Seq>
+dependencies() 
+
+
+static RDD
+distinct() 
+
+
+static RDD
+distinct(int numPartitions, scala.math.Ordering ord)

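The createRDDFromArray and createRDDFromFile entries above are the hooks the
SparkR backend uses to hand serialized R objects to the JVM as a JavaRDD of
byte arrays. Below is a minimal Java sketch of the call shape documented
above; the class name RRDDSketch and the placeholder byte arrays are
hypothetical, and RRDD is internal SparkR plumbing rather than a stable
public API.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.r.RRDD;

public class RRDDSketch {
  public static void main(String[] args) {
    SparkConf conf = new SparkConf().setAppName("rrdd-sketch").setMaster("local[2]");
    JavaSparkContext jsc = new JavaSparkContext(conf);

    // Stand-ins for R objects that SparkR would serialize to bytes on the R side.
    byte[][] serialized = {
        "one".getBytes(), "two".getBytes(), "three".getBytes()
    };

    // Wrap the byte arrays in a JavaRDD<byte[]> via the documented static
    // helper: createRDDFromArray(JavaSparkContext jsc, byte[][] arr).
    JavaRDD<byte[]> rdd = RRDD.createRDDFromArray(jsc, serialized);
    System.out.println("count = " + rdd.count());   // prints "count = 3"

    jsc.stop();
  }
}
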
[01/51] [partial] spark-website git commit: Add 2.1.2 docs

Repository: spark-website
Updated Branches:
  refs/heads/apache-asf-site [created] a6d9cbdef


http://git-wip-us.apache.org/repos/asf/spark-website/blob/a6d9cbde/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html
--
diff --git a/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html b/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html
new file mode 100644
index 000..19fea40
--- /dev/null
+++ b/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html
@@ -0,0 +1,2158 @@

[01/51] [partial] spark-website git commit: Add 2.1.2 docs

Repository: spark-website
Updated Branches:
  refs/heads/add-2.1.2-docs [created] 0b563c84c


http://git-wip-us.apache.org/repos/asf/spark-website/blob/0b563c84/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html
--
diff --git a/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html b/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html
new file mode 100644
index 000..19fea40
--- /dev/null
+++ b/site/docs/2.1.2/api/java/org/apache/spark/api/r/RRDD.html
@@ -0,0 +1,2158 @@