Repository: spark
Updated Branches:
  refs/heads/branch-1.1 9b0cff2d4 -> 0c8183cb3


[SPARK-1981][Streaming][Hotfix] Fixed docs related to kinesis

- Include kinesis in the unidocs
- Hide non-public classes from docs
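
For context, a minimal usage sketch of the public API whose docs this change exposes (KinesisDocsSketch, the stream name, endpoint, and intervals are placeholder names/values; the call follows the KinesisUtils.createStream signature touched in the diff below):

    import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream
    import org.apache.spark.SparkConf
    import org.apache.spark.storage.StorageLevel
    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.kinesis.KinesisUtils

    object KinesisDocsSketch {
      def main(args: Array[String]): Unit = {
        val sparkConf = new SparkConf().setAppName("KinesisDocsSketch")
        val ssc = new StreamingContext(sparkConf, Seconds(2))

        // Raw Kinesis records arrive as a ReceiverInputDStream[Array[Byte]].
        val records = KinesisUtils.createStream(
          ssc, "mySparkStream", "https://kinesis.us-east-1.amazonaws.com",
          Seconds(2), InitialPositionInStream.LATEST, StorageLevel.MEMORY_AND_DISK_2)

        records.map(bytes => new String(bytes)).print()
        ssc.start()
        ssc.awaitTermination()
      }
    }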

Author: Tathagata Das <tathagata.das1...@gmail.com>

Closes #2239 from tdas/kinesis-doc-fix and squashes the following commits:

156e20c [Tathagata Das] More fixes, based on PR comments.
e9a6c01 [Tathagata Das] Fixed docs related to kinesis

(cherry picked from commit e9bb12bea9fbef94332fbec88e3cd9197a27b7ad)
Signed-off-by: Tathagata Das <tathagata.das1...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0c8183cb
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0c8183cb
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0c8183cb

Branch: refs/heads/branch-1.1
Commit: 0c8183cb30b69902cb6fef220980ea51c4533396
Parents: 9b0cff2
Author: Tathagata Das <tathagata.das1...@gmail.com>
Authored: Tue Sep 2 19:02:48 2014 -0700
Committer: Tathagata Das <tathagata.das1...@gmail.com>
Committed: Tue Sep 2 19:03:04 2014 -0700

----------------------------------------------------------------------
 docs/_plugins/copy_api_dirs.rb                            |  4 ++--
 .../spark/examples/streaming/JavaKinesisWordCountASL.java | 10 +++++-----
 .../spark/examples/streaming/KinesisWordCountASL.scala    |  6 +++---
 .../org/apache/spark/streaming/kinesis/KinesisUtils.scala |  7 ++++---
 project/SparkBuild.scala                                  |  6 +++---
 5 files changed, 17 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/0c8183cb/docs/_plugins/copy_api_dirs.rb
----------------------------------------------------------------------
diff --git a/docs/_plugins/copy_api_dirs.rb b/docs/_plugins/copy_api_dirs.rb
index 2dbbbf6..3b02e09 100644
--- a/docs/_plugins/copy_api_dirs.rb
+++ b/docs/_plugins/copy_api_dirs.rb
@@ -25,8 +25,8 @@ if not (ENV['SKIP_API'] == '1' or ENV['SKIP_SCALADOC'] == '1')
   curr_dir = pwd
   cd("..")
 
-  puts "Running 'sbt/sbt compile unidoc' from " + pwd + "; this may take a few 
minutes..."
-  puts `sbt/sbt compile unidoc`
+  puts "Running 'sbt/sbt -Pkinesis-asl compile unidoc' from " + pwd + "; this 
may take a few minutes..."
+  puts `sbt/sbt -Pkinesis-asl compile unidoc`
 
   puts "Moving back into docs dir."
   cd("docs")

http://git-wip-us.apache.org/repos/asf/spark/blob/0c8183cb/extras/kinesis-asl/src/main/java/org/apache/spark/examples/streaming/JavaKinesisWordCountASL.java
----------------------------------------------------------------------
diff --git a/extras/kinesis-asl/src/main/java/org/apache/spark/examples/streaming/JavaKinesisWordCountASL.java b/extras/kinesis-asl/src/main/java/org/apache/spark/examples/streaming/JavaKinesisWordCountASL.java
index a8b907b..f36952b 100644
--- a/extras/kinesis-asl/src/main/java/org/apache/spark/examples/streaming/JavaKinesisWordCountASL.java
+++ b/extras/kinesis-asl/src/main/java/org/apache/spark/examples/streaming/JavaKinesisWordCountASL.java
@@ -75,7 +75,7 @@ import com.google.common.collect.Lists;
  *   onto the Kinesis stream. 
 * Usage instructions for KinesisWordCountProducerASL are provided in the class definition.
  */
-public final class JavaKinesisWordCountASL {
+public final class JavaKinesisWordCountASL { // needs to be public for access from run-example
     private static final Pattern WORD_SEPARATOR = Pattern.compile(" ");
    private static final Logger logger = Logger.getLogger(JavaKinesisWordCountASL.class);
 
@@ -87,10 +87,10 @@ public final class JavaKinesisWordCountASL {
         /* Check that all required args were passed in. */
         if (args.length < 2) {
           System.err.println(
-              "|Usage: KinesisWordCount <stream-name> <endpoint-url>\n" +
-              "|    <stream-name> is the name of the Kinesis stream\n" +
-              "|    <endpoint-url> is the endpoint of the Kinesis service\n" +
-              "|                   (e.g. 
https://kinesis.us-east-1.amazonaws.com)\n");
+              "Usage: JavaKinesisWordCountASL <stream-name> <endpoint-url>\n" +
+              "    <stream-name> is the name of the Kinesis stream\n" +
+              "    <endpoint-url> is the endpoint of the Kinesis service\n" +
+              "                   (e.g. 
https://kinesis.us-east-1.amazonaws.com)\n");
           System.exit(1);
         }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/0c8183cb/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
----------------------------------------------------------------------
diff --git a/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala b/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
index d03edf8..fffd90d 100644
--- a/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
+++ b/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
@@ -69,7 +69,7 @@ import org.apache.log4j.Level
  *   dummy data onto the Kinesis stream.
 * Usage instructions for KinesisWordCountProducerASL are provided in that class definition.
  */
-object KinesisWordCountASL extends Logging {
+private object KinesisWordCountASL extends Logging {
   def main(args: Array[String]) {
     /* Check that all required args were passed in. */
     if (args.length < 2) {
@@ -154,7 +154,7 @@ object KinesisWordCountASL extends Logging {
 *         org.apache.spark.examples.streaming.KinesisWordCountProducerASL mySparkStream \
  *         https://kinesis.us-east-1.amazonaws.com 10 5
  */
-object KinesisWordCountProducerASL {
+private object KinesisWordCountProducerASL {
   def main(args: Array[String]) {
     if (args.length < 4) {
       System.err.println("Usage: KinesisWordCountProducerASL <stream-name> 
<endpoint-url>" +
@@ -235,7 +235,7 @@ object KinesisWordCountProducerASL {
  *  Utility functions for Spark Streaming examples. 
 *  This has been lifted from the examples/ project to remove the circular dependency.
  */
-object StreamingExamples extends Logging {
+private[streaming] object StreamingExamples extends Logging {
 
   /** Set reasonable logging levels for streaming if the user has not configured log4j. */
   def setStreamingLogLevels() {

http://git-wip-us.apache.org/repos/asf/spark/blob/0c8183cb/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisUtils.scala
----------------------------------------------------------------------
diff --git a/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisUtils.scala b/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisUtils.scala
index 713cac0..96f4399 100644
--- a/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisUtils.scala
+++ b/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisUtils.scala
@@ -35,7 +35,7 @@ import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionIn
 object KinesisUtils {
   /**
    * Create an InputDStream that pulls messages from a Kinesis stream.
-   *
+   * :: Experimental ::
    * @param ssc    StreamingContext object
    * @param streamName   Kinesis stream name
   * @param endpointUrl  Url of Kinesis service (e.g., https://kinesis.us-east-1.amazonaws.com)
@@ -52,6 +52,7 @@ object KinesisUtils {
    *
    * @return ReceiverInputDStream[Array[Byte]]
    */
+  @Experimental
   def createStream(
       ssc: StreamingContext,
       streamName: String,
@@ -65,9 +66,8 @@ object KinesisUtils {
 
   /**
   * Create a Java-friendly InputDStream that pulls messages from a Kinesis stream.
-   *
+   * :: Experimental ::
    * @param jssc Java StreamingContext object
-   * @param ssc    StreamingContext object
    * @param streamName   Kinesis stream name
   * @param endpointUrl  Url of Kinesis service (e.g., https://kinesis.us-east-1.amazonaws.com)
    * @param checkpointInterval  Checkpoint interval for Kinesis checkpointing.
@@ -83,6 +83,7 @@ object KinesisUtils {
    *
    * @return JavaReceiverInputDStream[Array[Byte]]
    */
+  @Experimental
   def createStream(
       jssc: JavaStreamingContext, 
       streamName: String, 
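
For context, a minimal sketch of the annotation pattern introduced above, assuming org.apache.spark.annotation.Experimental is on the classpath (ExampleUtils and createSomething are hypothetical names):

    import org.apache.spark.annotation.Experimental

    object ExampleUtils {
      /**
       * :: Experimental ::
       * The ":: Experimental ::" Scaladoc marker pairs with the @Experimental
       * annotation so the generated docs flag this method as an unstable API.
       */
      @Experimental
      def createSomething(): Unit = ()
    }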

http://git-wip-us.apache.org/repos/asf/spark/blob/0c8183cb/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 49d52ae..486de93 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -290,9 +290,9 @@ object Unidoc {
     publish := {},
 
     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, catalyst, yarn, yarnAlpha),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, catalyst, streamingFlumeSink, yarn, yarnAlpha),
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, bagel, graphx, examples, tools, catalyst, yarn, yarnAlpha),
+      inAnyProject -- inProjects(OldDeps.project, repl, bagel, graphx, examples, tools, catalyst, streamingFlumeSink, yarn, yarnAlpha),
 
     // Skip class names containing $ and some internal packages in Javadocs
     unidocAllSources in (JavaUnidoc, unidoc) := {
@@ -314,7 +314,7 @@ object Unidoc {
       "-group", "Core Java API", packageList("api.java", "api.java.function"),
       "-group", "Spark Streaming", packageList(
         "streaming.api.java", "streaming.flume", "streaming.kafka",
-        "streaming.mqtt", "streaming.twitter", "streaming.zeromq"
+        "streaming.mqtt", "streaming.twitter", "streaming.zeromq", 
"streaming.kinesis"
       ),
       "-group", "MLlib", packageList(
         "mllib.classification", "mllib.clustering", "mllib.evaluation.binary", 
"mllib.linalg",

