[07/51] [partial] spark-website git commit: Add 2.1.2 docs

2017-10-17 Thread holden
http://git-wip-us.apache.org/repos/asf/spark-website/blob/a6155a89/site/docs/2.1.2/api/java/org/apache/spark/api/java/JavaSparkContext.html
--
diff --git a/site/docs/2.1.2/api/java/org/apache/spark/api/java/JavaSparkContext.html b/site/docs/2.1.2/api/java/org/apache/spark/api/java/JavaSparkContext.html
new file mode 100644
index 000..6927b66
--- /dev/null
+++ b/site/docs/2.1.2/api/java/org/apache/spark/api/java/JavaSparkContext.html
@@ -0,0 +1,2088 @@
+JavaSparkContext (Spark 2.1.2 JavaDoc)
+
+
+org.apache.spark.api.java
+Class JavaSparkContext
+
+Object
+  org.apache.spark.api.java.JavaSparkContext
+
+All Implemented Interfaces:
+java.io.Closeable, AutoCloseable
+
+public class JavaSparkContext
+extends Object
+implements java.io.Closeable
+A Java-friendly version of SparkContext that returns
+ JavaRDDs and works with Java collections instead of Scala ones.
+
+ Only one SparkContext may be active per JVM. You must stop() the active
+ SparkContext before creating a new one. This limitation may eventually be
+ removed; see SPARK-2243 for more details.
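
[Editor's note: a minimal usage sketch, not part of the committed page; it assumes
Spark 2.1.x on the classpath and a local master, and the class/app names are arbitrary.]

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;

    public final class JscExample {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setAppName("jsc-example").setMaster("local[2]");
            JavaSparkContext jsc = new JavaSparkContext(conf);
            try {
                // build and run JavaRDD operations here
            } finally {
                jsc.stop(); // only one SparkContext may be active per JVM
            }
        }
    }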
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+JavaSparkContext()
+Create a JavaSparkContext that loads settings from system properties
+ (for instance, when launching with ./bin/spark-submit).
+
+JavaSparkContext(SparkConf conf)
+
+JavaSparkContext(SparkContext sc)
+
+JavaSparkContext(String master,
+                 String appName)
+
+JavaSparkContext(String master,
+                 String appName,
+                 SparkConf conf)
+
+JavaSparkContext(String master,
+                 String appName,
+                 String sparkHome,
+                 String jarFile)
+
+JavaSparkContext(String master,
+                 String appName,
+                 String sparkHome,
+                 String[] jars)
+
+JavaSparkContext(String master,
+                 String appName,
+                 String sparkHome,
+                 String[] jars,
+                 java.util.Map<String,String> environment)
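
[Editor's note: for illustration only, the (master, appName) convenience constructor
is roughly equivalent to building a SparkConf by hand; names are hypothetical, and
only one context may be active at a time, hence the stop() calls.]

    // Construct from explicit master/appName strings...
    JavaSparkContext fromArgs = new JavaSparkContext("local[2]", "ctor-example");
    fromArgs.stop();

    // ...or from a SparkConf carrying the same settings.
    SparkConf conf2 = new SparkConf().setMaster("local[2]").setAppName("ctor-example");
    JavaSparkContext fromConf = new JavaSparkContext(conf2);
    fromConf.stop();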
+
+Method Summary
+
+Methods
+
+Modifier and Type
+Method and Description
+
+<T,R> Accumulable<T,R>
+accumulable(T initialValue,
+            AccumulableParam<T,R> param)
+Deprecated. use AccumulatorV2. Since 2.0.0.
+
+<T,R> Accumulable<T,R>
+accumulable(T initialValue,
+            String name,
+            AccumulableParam<T,R> param)
+Deprecated. use AccumulatorV2. Since 2.0.0.
+
+Accumulator<Double>
+accumulator(double initialValue)
+Deprecated. use sc().doubleAccumulator(). Since 2.0.0.
+
+Accumulator<Double>
+accumulator(double initialValue,
+            String name)
+Deprecated. use sc().doubleAccumulator(String). Since 2.0.0.
+
+Accumulator<Integer>
+accumulator(int initialValue)
+Deprecated. use sc().longAccumulator(). Since 2.0.0.
+
+Accumulator<Integer>
+accumulator(int initialValue,
+            String name)
+Deprecated. use sc().longAccumulator(String). Since 2.0.0.
+
+<T> Accumulator<T>
+accumulator(T initialValue,
+            AccumulatorParam<T> accumulatorParam)
+Deprecated. use AccumulatorV2. Since 2.0.0.
+
+<T> Accumulator<T>
+accumulator(T initialValue,
+            String name,
+            AccumulatorParam<T> accumulatorParam)
+Deprecated. use AccumulatorV2. Since 2.0.0.
+
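[Editor's note: every accumulator overload above is deprecated in favor of
AccumulatorV2; a migration sketch, reusing the jsc from the first sketch, with an
arbitrary accumulator name. The replacement calls are the ones the deprecation
notes themselves point to.]

    import org.apache.spark.util.LongAccumulator;

    // Old, deprecated style: Accumulator<Integer> acc = jsc.accumulator(0);
    // New style goes through the underlying SparkContext:
    LongAccumulator acc = jsc.sc().longAccumulator("records-seen");
    jsc.parallelize(java.util.Arrays.asList(1, 2, 3))
       .foreach(x -> acc.add(1));
    System.out.println(acc.value()); // prints 3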
+
+
+
+void
+addFile(String path)
+Add a file to be downloaded with this Spark job on every node.
+
+void
+addFile(String path,
+        boolean recursive)
+Add a file to be downloaded with this Spark job on every node.
+
+void
+addJar(String path)
+Adds a JAR dependency for all tasks to be executed on this SparkContext
+ in the future.
+
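[Editor's note: a sketch of the usual addFile pairing with SparkFiles.get;
the path is hypothetical, jsc as in the first sketch.]

    import org.apache.spark.SparkFiles;

    // Ship a file to every node once, then resolve its local copy inside tasks.
    jsc.addFile("hdfs:///config/lookup.txt"); // hypothetical path
    jsc.parallelize(java.util.Arrays.asList(1, 2, 3))
       .foreach(x -> {
           String local = SparkFiles.get("lookup.txt"); // local path on the worker
           // read the shipped file here
       });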
+
+
+String
+appName()
+
+JavaPairRDD<String,PortableDataStream>
+binaryFiles(String path)
+Read a directory of binary files from HDFS, a local file system
+ (available on all nodes), or any Hadoop-supported file system URI
+ as a byte array.
+
+JavaPairRDD<String,PortableDataStream>
+binaryFiles(String path,
+            int minPartitions)
+Read a directory of binary files from HDFS, a local file system
+ (available on all nodes), or any Hadoop-supported file system URI
+ as a byte array.
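
[Editor's note: a sketch of consuming binaryFiles output; the directory URI is
hypothetical, jsc as in the first sketch.]

    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.input.PortableDataStream;

    JavaPairRDD<String, PortableDataStream> files =
        jsc.binaryFiles("hdfs:///data/blobs"); // hypothetical directory
    // Materialize each stream as a byte[] and report its size on the driver.
    files.mapValues(stream -> stream.toArray().length)
         .collect()
         .forEach(t -> System.out.println(t._1() + " -> " + t._2() + " bytes"));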
