[20/51] [partial] spark-website git commit: Add 2.1.2 docs

2017-10-17 Thread holden
http://git-wip-us.apache.org/repos/asf/spark-website/blob/a6155a89/site/docs/2.1.2/api/java/org/apache/spark/SparkConf.html
--
diff --git a/site/docs/2.1.2/api/java/org/apache/spark/SparkConf.html b/site/docs/2.1.2/api/java/org/apache/spark/SparkConf.html
new file mode 100644
index 000..4c33803
--- /dev/null
+++ b/site/docs/2.1.2/api/java/org/apache/spark/SparkConf.html
@@ -0,0 +1,1147 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+SparkConf (Spark 2.1.2 JavaDoc)
+
+org.apache.spark
+Class SparkConf
+
+Object
+  org.apache.spark.SparkConf
+
+All Implemented Interfaces:
+java.io.Serializable, Cloneable
+
+
+
+public class SparkConf
+extends Object
+implements scala.Cloneable, scala.Serializable
+Configuration for a Spark application. Used to set various Spark parameters as key-value pairs.
+ 
+ Most of the time, you would create a SparkConf object with new SparkConf(), which will load
+ values from any spark.* Java system properties set in your application as well. In this case,
+ parameters you set directly on the SparkConf object take priority over system properties.
+ 
+ For unit tests, you can also call new SparkConf(false) to skip loading external settings and
+ get the same configuration no matter what the system properties are.
+ 
+ All setter methods in this class support chaining. For example, you can write
+ new SparkConf().setMaster("local").setAppName("My app").
+ 
+ param: loadDefaults whether to also load values from Java system properties
+ 
+See Also: Serialized Form
+Note: Once a SparkConf object is passed to Spark, it is cloned and can no longer be modified
+ by the user. Spark does not support modifying the configuration at runtime.
+
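As a concrete illustration of the chaining and defaults behavior described above, here is a minimal driver-side sketch; the master URL, application name, and class name are placeholder values, not part of this commit:

    import org.apache.spark.SparkConf;

    public class ConfExample {
      public static void main(String[] args) {
        // new SparkConf() also loads any spark.* Java system properties;
        // values set explicitly below take priority over them.
        SparkConf conf = new SparkConf()
            .setMaster("local")      // placeholder master URL
            .setAppName("My app");   // placeholder application name

        // For unit tests: skip external settings entirely, so the result
        // is the same no matter what the system properties are.
        SparkConf testConf = new SparkConf(false);
      }
    }

Per the note above, once conf is handed to Spark it is cloned, so configure everything before constructing the context.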
+
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Constructor and Description
+
+
+SparkConf()
+Create a SparkConf that loads defaults from system properties and the classpath
+
+
+
+SparkConf(boolean loadDefaults) 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+Methods 
+
+Modifier and Type
+Method and Description
+
+
+SparkConf
+clone()
+Copy this object
+
+
+
+boolean
+contains(String key)
+Does the configuration contain a given parameter?
+
+
+
+String
+get(String key)
+Get a parameter; throws a NoSuchElementException if it's not set
+
+
+
+String
+get(String key,
+   String defaultValue)
+Get a parameter, falling back to a default if not set
+
+
+
+scala.Tuple2<String,String>[]
+getAll()
+Get all parameters as a list of pairs
+
+
+
+scala.Tuple2<String,String>[]
+getAllWithPrefix(String prefix)
+Get all parameters that start with prefix
+
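A short sketch of enumerating parameters with getAll and getAllWithPrefix; the keys and values are illustrative, and the observation that getAllWithPrefix strips the prefix from the returned keys is taken from the Spark sources, not from this page:

    import org.apache.spark.SparkConf;
    import scala.Tuple2;

    public class EnumerateConf {
      public static void main(String[] args) {
        SparkConf conf = new SparkConf(false)
            .set("spark.executor.memory", "2g")
            .set("spark.executor.cores", "4");

        // Every parameter as (key, value) pairs.
        for (Tuple2<String, String> kv : conf.getAll()) {
          System.out.println(kv._1() + " = " + kv._2());
        }

        // Only the spark.executor.* parameters; the prefix is stripped
        // from the returned keys, e.g. ("memory", "2g").
        for (Tuple2<String, String> kv : conf.getAllWithPrefix("spark.executor.")) {
          System.out.println(kv._1() + " = " + kv._2());
        }
      }
    }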
+
+
+String
+getAppId()
+Returns the Spark application id, valid in the Driver after TaskScheduler registration and from the start in the Executor.
+
+
+
+scala.collection.immutable.Map<Object,String>
+getAvroSchema()
+Gets all the avro schemas in the configuration used in the generic Avro record serializer
+
+
+
+boolean
+getBoolean(String key,
+  boolean defaultValue)
+Get a parameter as a boolean, falling back to a default if not set
+
+
+
+static scala.Option<String>
+getDeprecatedConfig(String key,
+   SparkConf conf)
+Looks for available deprecated keys for the given config option, and returns the first value available.
+
+
+
+double
+getDouble(String key,
+ double defaultValue)
+Get a parameter as a double, falling back to a default if not set
+
+
+
+scala.collection.Seq<scala.Tuple2<String,String>>
+getExecutorEnv()
+Get all executor environment variables set on this SparkConf
+
+
+
+int
+getInt(String key,
+  int defaultValue)
+Get a parameter as an integer, falling back to a default if not set
+
+
+
+long
+getLong(String key,
+   long defaultValue)
+Get a parameter as a long, falling back to a default if not set
+
+
+
+scala.Option<String>
+getOption(String key)
+Get a parameter as an Option
+
+
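The typed getters and getOption summarized above can be exercised as in this minimal sketch; the keys and values are illustrative:

    import org.apache.spark.SparkConf;
    import scala.Option;

    public class GetterExample {
      public static void main(String[] args) {
        SparkConf conf = new SparkConf(false).set("spark.ui.port", "4041");

        conf.contains("spark.ui.port");             // true
        conf.get("spark.ui.port");                  // "4041"
        conf.get("spark.master", "local[*]");       // unset key: returns the default
        conf.getInt("spark.ui.port", 4040);         // 4041, parsed as an int
        conf.getBoolean("spark.ui.enabled", true);  // unset key: returns true

        // Unlike get(key), getOption never throws NoSuchElementException.
        Option<String> master = conf.getOption("spark.master");
        System.out.println(master.isDefined() ? master.get() : "<unset>");
      }
    }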
+
+long
+getSizeAsBytes(String key)
+Get a size parameter as bytes; throws a NoSuchElementException if it's not set.
+
+
+
+long
+getSizeAsBytes(String key,
+  long defaultValue)
+Get a size parameter as bytes, falling back to a default if not set.
+
+
+
+long
+getSizeAsBytes(String key,
+  String defaultValue)
+Get a size parameter as bytes, falling back to a default if not set.
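The getSizeAsBytes family parses human-readable size strings; a minimal sketch, where the keys and sizes are illustrative and the binary reading of "1g" (1024^3 bytes) follows Spark's size-unit conventions:

    import org.apache.spark.SparkConf;

    public class SizeExample {
      public static void main(String[] args) {
        SparkConf conf = new SparkConf(false)
            .set("spark.driver.maxResultSize", "1g");

        // "1g" is parsed as 1024^3 bytes.
        long max = conf.getSizeAsBytes("spark.driver.maxResultSize");
        System.out.println(max);  // 1073741824

        // Defaults may be given as a size string or as a long (bytes).
        long buf = conf.getSizeAsBytes("spark.shuffle.file.buffer", "32k");
        long mem = conf.getSizeAsBytes("spark.unknown.key", 65536L);
      }
    }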
