Repository: spark
Updated Branches:
  refs/heads/master ac6fe67e1 -> a51d51ffa


SPARK-5850: Remove experimental label for Scala 2.11 and FlumePollingStream

Author: Patrick Wendell <[email protected]>

Closes #4638 from pwendell/SPARK-5850 and squashes the following commits:

386126f [Patrick Wendell] SPARK-5850: Remove experimental label for Scala 2.11 and FlumePollingStream.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a51d51ff
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a51d51ff
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a51d51ff

Branch: refs/heads/master
Commit: a51d51ffac00931c80ce93889a98c2f77aef8953
Parents: ac6fe67
Author: Patrick Wendell <[email protected]>
Authored: Mon Feb 16 20:33:33 2015 -0800
Committer: Patrick Wendell <[email protected]>
Committed: Mon Feb 16 20:33:33 2015 -0800

----------------------------------------------------------------------
 docs/building-spark.md                                       | 6 +++---
 docs/streaming-flume-integration.md                          | 2 +-
 .../scala/org/apache/spark/streaming/flume/FlumeUtils.scala  | 8 --------
 3 files changed, 4 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/a51d51ff/docs/building-spark.md
----------------------------------------------------------------------
diff --git a/docs/building-spark.md b/docs/building-spark.md
index 088da7d..4c3988e 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -111,9 +111,9 @@ To produce a Spark package compiled with Scala 2.11, use the `-Dscala-2.11` prop
     dev/change-version-to-2.11.sh
     mvn -Pyarn -Phadoop-2.4 -Dscala-2.11 -DskipTests clean package
 
-Scala 2.11 support in Spark is experimental and does not support a few features.
-Specifically, Spark's external Kafka library and JDBC component are not yet
-supported in Scala 2.11 builds.
+Scala 2.11 support in Spark does not support a few features due to dependencies
+which are themselves not Scala 2.11 ready. Specifically, Spark's external 
+Kafka library and JDBC component are not yet supported in Scala 2.11 builds.
 
 # Spark Tests in Maven
 

http://git-wip-us.apache.org/repos/asf/spark/blob/a51d51ff/docs/streaming-flume-integration.md
----------------------------------------------------------------------
diff --git a/docs/streaming-flume-integration.md b/docs/streaming-flume-integration.md
index ac01dd3..40e1724 100644
--- a/docs/streaming-flume-integration.md
+++ b/docs/streaming-flume-integration.md
@@ -64,7 +64,7 @@ configuring Flume agents.
 
 3. **Deploying:** Package `spark-streaming-flume_{{site.SCALA_BINARY_VERSION}}` and its dependencies (except `spark-core_{{site.SCALA_BINARY_VERSION}}` and `spark-streaming_{{site.SCALA_BINARY_VERSION}}` which are provided by `spark-submit`) into the application JAR. Then use `spark-submit` to launch your application (see [Deploying section](streaming-programming-guide.html#deploying-applications) in the main programming guide).
 
-## Approach 2 (Experimental): Pull-based Approach using a Custom Sink
+## Approach 2: Pull-based Approach using a Custom Sink
 Instead of Flume pushing data directly to Spark Streaming, this approach runs a custom Flume sink that allows the following.
 
 - Flume pushes data into the sink, and the data stays buffered.

http://git-wip-us.apache.org/repos/asf/spark/blob/a51d51ff/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeUtils.scala
----------------------------------------------------------------------
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeUtils.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeUtils.scala
index 4b732c1..44dec45 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeUtils.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeUtils.scala
@@ -19,7 +19,6 @@ package org.apache.spark.streaming.flume
 
 import java.net.InetSocketAddress
 
-import org.apache.spark.annotation.Experimental
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming.StreamingContext
 import org.apache.spark.streaming.api.java.{JavaReceiverInputDStream, JavaStreamingContext}
@@ -121,7 +120,6 @@ object FlumeUtils {
    * @param port Port of the host at which the Spark Sink is listening
    * @param storageLevel Storage level to use for storing the received objects
    */
-  @Experimental
   def createPollingStream(
       ssc: StreamingContext,
       hostname: String,
@@ -138,7 +136,6 @@ object FlumeUtils {
    * @param addresses List of InetSocketAddresses representing the hosts to connect to.
    * @param storageLevel Storage level to use for storing the received objects
    */
-  @Experimental
   def createPollingStream(
       ssc: StreamingContext,
       addresses: Seq[InetSocketAddress],
@@ -159,7 +156,6 @@ object FlumeUtils {
    *                    result in this stream using more threads
    * @param storageLevel Storage level to use for storing the received objects
    */
-  @Experimental
   def createPollingStream(
       ssc: StreamingContext,
       addresses: Seq[InetSocketAddress],
@@ -178,7 +174,6 @@ object FlumeUtils {
    * @param hostname Hostname of the host on which the Spark Sink is running
    * @param port     Port of the host at which the Spark Sink is listening
    */
-  @Experimental
   def createPollingStream(
       jssc: JavaStreamingContext,
       hostname: String,
@@ -195,7 +190,6 @@ object FlumeUtils {
    * @param port         Port of the host at which the Spark Sink is listening
    * @param storageLevel Storage level to use for storing the received objects
    */
-  @Experimental
   def createPollingStream(
       jssc: JavaStreamingContext,
       hostname: String,
@@ -212,7 +206,6 @@ object FlumeUtils {
    * @param addresses    List of InetSocketAddresses on which the Spark Sink is running.
    * @param storageLevel Storage level to use for storing the received objects
    */
-  @Experimental
   def createPollingStream(
       jssc: JavaStreamingContext,
       addresses: Array[InetSocketAddress],
@@ -233,7 +226,6 @@ object FlumeUtils {
    *                     result in this stream using more threads
    * @param storageLevel Storage level to use for storing the received objects
    */
-  @Experimental
   def createPollingStream(
       jssc: JavaStreamingContext,
       addresses: Array[InetSocketAddress],


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to