Repository: bahir-website
Updated Branches:
  refs/heads/asf-site 32c2860dd -> cc5d26efb


http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/docs/spark/2.1.2/spark-streaming-akka/index.html
----------------------------------------------------------------------
diff --git a/content/docs/spark/2.1.2/spark-streaming-akka/index.html 
b/content/docs/spark/2.1.2/spark-streaming-akka/index.html
new file mode 100644
index 0000000..f1f66ce
--- /dev/null
+++ b/content/docs/spark/2.1.2/spark-streaming-akka/index.html
@@ -0,0 +1,325 @@
+
+
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <title>Spark Streaming Akka</title>
+    <meta name="description" content="Spark Streaming Akka">
+    <meta name="author" content="">
+
+    <!-- Enable responsive viewport -->
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+
+    <!-- Le HTML5 shim, for IE6-8 support of HTML elements -->
+    <!--[if lt IE 9]>
+      <script 
src="http://html5shim.googlecode.com/svn/trunk/html5.js";></script>
+    <![endif]-->
+
+    <!-- Le styles -->
+    <link href="/assets/themes/apache-clean/bootstrap/css/bootstrap.css" 
rel="stylesheet">
+    <link href="/assets/themes/apache-clean/css/style.css?body=1" 
rel="stylesheet" type="text/css">
+    <link href="/assets/themes/apache-clean/css/syntax.css" rel="stylesheet"  
type="text/css" media="screen" />
+    <!-- Le fav and touch icons -->
+    <!-- Update these with your own images
+    <link rel="shortcut icon" href="images/favicon.ico">
+    <link rel="apple-touch-icon" href="images/apple-touch-icon.png">
+    <link rel="apple-touch-icon" sizes="72x72" 
href="images/apple-touch-icon-72x72.png">
+    <link rel="apple-touch-icon" sizes="114x114" 
href="images/apple-touch-icon-114x114.png">
+  -->
+
+    <!-- make tables sortable by adding class tag "sortable" to table elements 
-->
+    <script 
src="http://www.kryogenix.org/code/browser/sorttable/sorttable.js";></script>
+
+
+  </head>
+
+  <body>
+
+    
+
+<!-- Navigation -->
+<div id="nav-bar">
+  <nav id="nav-container" class="navbar navbar-inverse " role="navigation">
+    <div class="container">
+      <!-- Brand and toggle get grouped for better mobile display -->
+
+      <div class="navbar-header page-scroll">
+        <button type="button" class="navbar-toggle" data-toggle="collapse" 
data-target=".navbar-collapse">
+          <span class="sr-only">Toggle navigation</span>
+          <span class="icon-bar"></span>
+          <span class="icon-bar"></span>
+          <span class="icon-bar"></span>
+        </button>
+        <a class="navbar-brand page-scroll" href="/#home">Home</a>
+      </div>
+      <!-- Collect the nav links, forms, and other content for toggling -->
+      <nav class="navbar-collapse collapse" role="navigation">
+        <ul class="nav navbar-nav">
+          
+          
+          
+          <li id="download">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Download<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/downloads/spark" target="_self">Bahir Spark 
Extensions</a></li>
+              
+              
+              <li><a href="/downloads/flink" target="_self">Bahir Flink 
Extensions</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="community">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Community<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/community" target="_self">Get Involved</a></li>
+              
+              
+              <li><a href="/contributing" target="_self">Contributing</a></li>
+              
+              
+              <li><a href="/contributing-extensions" 
target="_self">Contributing Extensions</a></li>
+              
+              
+              <li><a href="https://issues.apache.org/jira/browse/BAHIR"; 
target="_blank">Issue Tracker</a></li>
+              
+              
+              <li><a href="/community#source-code" target="_self">Source 
Code</a></li>
+              
+              
+              <li><a href="/community-members" target="_self">Project 
Committers</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="documentation">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Documentation<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/docs/spark/overview" target="_self">Bahir Spark 
Extensions</a></li>
+              
+              
+              <li><a href="/docs/flink/overview" target="_self">Bahir Flink 
Extensions</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="github">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">GitHub<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="https://github.com/apache/bahir"; 
target="_blank">Bahir Spark Extensions</a></li>
+              
+              
+              <li><a href="https://github.com/apache/bahir-flink"; 
target="_blank">Bahir Flink Extensions</a></li>
+              
+              
+              <li><a href="https://github.com/apache/bahir-website"; 
target="_blank">Bahir Website</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="apache">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Apache<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="http://www.apache.org/foundation/how-it-works.html"; 
target="_blank">Apache Software Foundation</a></li>
+              
+              
+              <li><a href="http://www.apache.org/licenses/"; 
target="_blank">Apache License</a></li>
+              
+              
+              <li><a href="http://www.apache.org/foundation/sponsorship"; 
target="_blank">Sponsorship</a></li>
+              
+              
+              <li><a href="http://www.apache.org/foundation/thanks.html"; 
target="_blank">Thanks</a></li>
+              
+              
+              <li><a href="/privacy-policy" target="_self">Privacy 
Policy</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+        </ul>
+      </nav><!--/.navbar-collapse -->
+      <!-- /.navbar-collapse -->
+    </div>
+    <!-- /.container -->
+  </nav>
+</div>
+
+
+    <div class="container">
+
+      
+
+<!--<div class="hero-unit Spark Streaming Akka">
+  <h1></h1>
+</div>
+-->
+
+<div class="row">
+  <div class="col-md-12">
+    <!--
+
+-->
+
+<p>A library for reading data from Akka Actors using Spark Streaming.</p>
+
+<h2 id="linking">Linking</h2>
+
+<p>Using SBT:</p>
+
+<div class="highlighter-rouge"><pre 
class="highlight"><code>libraryDependencies += "org.apache.bahir" %% 
"spark-streaming-akka" % "2.1.2"
+</code></pre>
+</div>
+
+<p>Using Maven:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>&lt;dependency&gt;
+    &lt;groupId&gt;org.apache.bahir&lt;/groupId&gt;
+    &lt;artifactId&gt;spark-streaming-akka_2.11&lt;/artifactId&gt;
+    &lt;version&gt;2.1.2&lt;/version&gt;
+&lt;/dependency&gt;
+</code></pre>
+</div>
+
+<p>This library can also be added to Spark jobs launched through <code 
class="highlighter-rouge">spark-shell</code> or <code 
class="highlighter-rouge">spark-submit</code> by using the <code 
class="highlighter-rouge">--packages</code> command line option.
+For example, to include it when starting the spark shell:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>$ bin/spark-shell 
--packages org.apache.bahir:spark-streaming-akka_2.11:2.1.2
+</code></pre>
+</div>
+
+<p>Unlike using <code class="highlighter-rouge">--jars</code>, using <code 
class="highlighter-rouge">--packages</code> ensures that this library and its 
dependencies will be added to the classpath.
+The <code class="highlighter-rouge">--packages</code> argument can also be 
used with <code class="highlighter-rouge">bin/spark-submit</code>.</p>
+
+<p>This library is cross-published for Scala 2.10 and Scala 2.11, so users should use the appropriate Scala version (2.10 or 2.11) in the commands listed above.</p>
+
+<h2 id="examples">Examples</h2>
+
+<p>DStreams can be created with data streams received through Akka actors by 
using <code class="highlighter-rouge">AkkaUtils.createStream(ssc, actorProps, 
actor-name)</code>.</p>
+
+<h3 id="scala-api">Scala API</h3>
+
+<p>You need to extend <code class="highlighter-rouge">ActorReceiver</code> so 
as to store received data into Spark using <code 
class="highlighter-rouge">store(...)</code> methods. The supervisor strategy of
+this actor can be configured to handle failures, etc.</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>class CustomActor 
extends ActorReceiver {
+  def receive = {
+    case data: String =&gt; store(data)
+  }
+}
+
+// A new input stream can be created with this custom actor as
+val ssc: StreamingContext = ...
+val lines = AkkaUtils.createStream[String](ssc, Props[CustomActor](), 
"CustomReceiver")
+</code></pre>
+</div>
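+
+<p>Below is a minimal end-to-end sketch (assuming a local master, a two-second batch interval, and a hypothetical word count as the processing step) showing how the receiver above could fit into a streaming application:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>import org.apache.spark.SparkConf
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.akka.AkkaUtils
+import akka.actor.Props
+
+// Assumption: local mode with a 2-second batch interval
+val conf = new SparkConf().setAppName("AkkaWordCount").setMaster("local[2]")
+val ssc = new StreamingContext(conf, Seconds(2))
+
+// Input DStream backed by the CustomActor receiver defined above
+val lines = AkkaUtils.createStream[String](ssc, Props[CustomActor](), "CustomReceiver")
+
+// Hypothetical processing: count words in each batch and print the counts
+lines.flatMap(_.split(" ")).map(word =&gt; (word, 1)).reduceByKey(_ + _).print()
+
+ssc.start()
+ssc.awaitTermination()
+</code></pre>
+</div>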
+
+<h3 id="java-api">Java API</h3>
+
+<p>You need to extend <code class="highlighter-rouge">JavaActorReceiver</code> 
so as to store received data into Spark using <code 
class="highlighter-rouge">store(...)</code> methods. The supervisor strategy of
+this actor can be configured to handle failures, etc.</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>class CustomActor 
extends JavaActorReceiver {
+    @Override
+    public void onReceive(Object msg) throws Exception {
+        store((String) msg);
+    }
+}
+
+// A new input stream can be created with this custom actor as
+JavaStreamingContext jssc = ...;
+JavaDStream&lt;String&gt; lines = AkkaUtils.&lt;String&gt;createStream(jssc, 
Props.create(CustomActor.class), "CustomReceiver");
+</code></pre>
+</div>
+
+<p>See end-to-end examples at <a 
href="https://github.com/apache/bahir/tree/master/streaming-akka/examples";>Akka 
Examples</a></p>
+
+  </div>
+</div>
+
+
+
+      <hr>
+
+      <!-- <p>&copy; 2018 </p>-->
+      <footer class="site-footer">
+    <div class="wrapper">
+        <div class="footer-col-wrapper">
+            
+            <div style="text-align:center;">
+                
+                <div>
+                    Copyright &copy; 2016-2017 <a 
href="http://www.apache.org";>The Apache Software Foundation</a>.
+                    Licensed under the <a 
href="http://www.apache.org/licenses/LICENSE-2.0";>Apache License, Version
+                    2.0</a>.
+                    <br>
+                    
+                    Apache and the Apache Feather logo are trademarks of The 
Apache Software Foundation.
+                    
+                </div>
+            </div>
+        </div>
+    </div>
+</footer>
+
+    </div>
+
+    
+
+
+  <script type="text/javascript">
+  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new 
Date();a=s.createElement(o),
+  
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+  })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+
+  ga('create', 'UA-79140859-1', 'bahir.apache.org');
+  ga('require', 'linkid', 'linkid.js');
+  ga('send', 'pageview');
+
+</script>
+
+
+
+    <script 
src="/assets/themes/apache-clean/jquery/jquery-2.1.1.min.js"></script>
+
+    <script 
src="/assets/themes/apache-clean/bootstrap/js/bootstrap.min.js"></script>
+
+
+  </body>
+</html>
+

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/docs/spark/2.1.2/spark-streaming-mqtt/index.html
----------------------------------------------------------------------
diff --git a/content/docs/spark/2.1.2/spark-streaming-mqtt/index.html 
b/content/docs/spark/2.1.2/spark-streaming-mqtt/index.html
new file mode 100644
index 0000000..a13574a
--- /dev/null
+++ b/content/docs/spark/2.1.2/spark-streaming-mqtt/index.html
@@ -0,0 +1,348 @@
+
+
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <title>Spark Streaming MQTT</title>
+    <meta name="description" content="Spark Streaming MQTT">
+    <meta name="author" content="">
+
+    <!-- Enable responsive viewport -->
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+
+    <!-- Le HTML5 shim, for IE6-8 support of HTML elements -->
+    <!--[if lt IE 9]>
+      <script 
src="http://html5shim.googlecode.com/svn/trunk/html5.js";></script>
+    <![endif]-->
+
+    <!-- Le styles -->
+    <link href="/assets/themes/apache-clean/bootstrap/css/bootstrap.css" 
rel="stylesheet">
+    <link href="/assets/themes/apache-clean/css/style.css?body=1" 
rel="stylesheet" type="text/css">
+    <link href="/assets/themes/apache-clean/css/syntax.css" rel="stylesheet"  
type="text/css" media="screen" />
+    <!-- Le fav and touch icons -->
+    <!-- Update these with your own images
+    <link rel="shortcut icon" href="images/favicon.ico">
+    <link rel="apple-touch-icon" href="images/apple-touch-icon.png">
+    <link rel="apple-touch-icon" sizes="72x72" 
href="images/apple-touch-icon-72x72.png">
+    <link rel="apple-touch-icon" sizes="114x114" 
href="images/apple-touch-icon-114x114.png">
+  -->
+
+    <!-- make tables sortable by adding class tag "sortable" to table elements 
-->
+    <script 
src="http://www.kryogenix.org/code/browser/sorttable/sorttable.js";></script>
+
+
+  </head>
+
+  <body>
+
+    
+
+<!-- Navigation -->
+<div id="nav-bar">
+  <nav id="nav-container" class="navbar navbar-inverse " role="navigation">
+    <div class="container">
+      <!-- Brand and toggle get grouped for better mobile display -->
+
+      <div class="navbar-header page-scroll">
+        <button type="button" class="navbar-toggle" data-toggle="collapse" 
data-target=".navbar-collapse">
+          <span class="sr-only">Toggle navigation</span>
+          <span class="icon-bar"></span>
+          <span class="icon-bar"></span>
+          <span class="icon-bar"></span>
+        </button>
+        <a class="navbar-brand page-scroll" href="/#home">Home</a>
+      </div>
+      <!-- Collect the nav links, forms, and other content for toggling -->
+      <nav class="navbar-collapse collapse" role="navigation">
+        <ul class="nav navbar-nav">
+          
+          
+          
+          <li id="download">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Download<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/downloads/spark" target="_self">Bahir Spark 
Extensions</a></li>
+              
+              
+              <li><a href="/downloads/flink" target="_self">Bahir Flink 
Extensions</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="community">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Community<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/community" target="_self">Get Involved</a></li>
+              
+              
+              <li><a href="/contributing" target="_self">Contributing</a></li>
+              
+              
+              <li><a href="/contributing-extensions" 
target="_self">Contributing Extensions</a></li>
+              
+              
+              <li><a href="https://issues.apache.org/jira/browse/BAHIR"; 
target="_blank">Issue Tracker</a></li>
+              
+              
+              <li><a href="/community#source-code" target="_self">Source 
Code</a></li>
+              
+              
+              <li><a href="/community-members" target="_self">Project 
Committers</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="documentation">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Documentation<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/docs/spark/overview" target="_self">Bahir Spark 
Extensions</a></li>
+              
+              
+              <li><a href="/docs/flink/overview" target="_self">Bahir Flink 
Extensions</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="github">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">GitHub<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="https://github.com/apache/bahir"; 
target="_blank">Bahir Spark Extensions</a></li>
+              
+              
+              <li><a href="https://github.com/apache/bahir-flink"; 
target="_blank">Bahir Flink Extensions</a></li>
+              
+              
+              <li><a href="https://github.com/apache/bahir-website"; 
target="_blank">Bahir Website</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="apache">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Apache<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="http://www.apache.org/foundation/how-it-works.html"; 
target="_blank">Apache Software Foundation</a></li>
+              
+              
+              <li><a href="http://www.apache.org/licenses/"; 
target="_blank">Apache License</a></li>
+              
+              
+              <li><a href="http://www.apache.org/foundation/sponsorship"; 
target="_blank">Sponsorship</a></li>
+              
+              
+              <li><a href="http://www.apache.org/foundation/thanks.html"; 
target="_blank">Thanks</a></li>
+              
+              
+              <li><a href="/privacy-policy" target="_self">Privacy 
Policy</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+        </ul>
+      </nav><!--/.navbar-collapse -->
+      <!-- /.navbar-collapse -->
+    </div>
+    <!-- /.container -->
+  </nav>
+</div>
+
+
+    <div class="container">
+
+      
+
+<!--<div class="hero-unit Spark Structured Streaming MQTT">
+  <h1></h1>
+</div>
+-->
+
+<div class="row">
+  <div class="col-md-12">
+    <!--
+
+-->
+
+<p><a href="http://mqtt.org/";>MQTT</a> is MQTT is a machine-to-machine 
(M2M)/”Internet of Things” connectivity protocol. It was designed as an 
extremely lightweight publish/subscribe messaging transport. It is useful for 
connections with remote locations where a small code footprint is required 
and/or network bandwidth is at a premium.</p>
+
+<h2 id="linking">Linking</h2>
+
+<p>Using SBT:</p>
+
+<div class="highlighter-rouge"><pre 
class="highlight"><code>libraryDependencies += "org.apache.bahir" %% 
"spark-streaming-mqtt" % "2.1.2"
+</code></pre>
+</div>
+
+<p>Using Maven:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>&lt;dependency&gt;
+    &lt;groupId&gt;org.apache.bahir&lt;/groupId&gt;
+    &lt;artifactId&gt;spark-streaming-mqtt_2.11&lt;/artifactId&gt;
+    &lt;version&gt;2.1.2&lt;/version&gt;
+&lt;/dependency&gt;
+</code></pre>
+</div>
+
+<p>This library can also be added to Spark jobs launched through <code 
class="highlighter-rouge">spark-shell</code> or <code 
class="highlighter-rouge">spark-submit</code> by using the <code 
class="highlighter-rouge">--packages</code> command line option.
+For example, to include it when starting the spark shell:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>$ bin/spark-shell 
--packages org.apache.bahir:spark-streaming-mqtt_2.11:2.1.2
+</code></pre>
+</div>
+
+<p>Unlike using <code class="highlighter-rouge">--jars</code>, using <code 
class="highlighter-rouge">--packages</code> ensures that this library and its 
dependencies will be added to the classpath.
+The <code class="highlighter-rouge">--packages</code> argument can also be 
used with <code class="highlighter-rouge">bin/spark-submit</code>.</p>
+
+<p>This library is cross-published for Scala 2.10 and Scala 2.11, so users should use the appropriate Scala version (2.10 or 2.11) in the commands listed above.</p>
+
+<h2 id="configuration-options">Configuration options.</h2>
+
+<p>This source uses the <a 
href="https://eclipse.org/paho/clients/java/";>Eclipse Paho Java Client</a>. 
Client API documentation is located <a 
href="http://www.eclipse.org/paho/files/javadoc/index.html";>here</a>.</p>
+
+<ul>
+  <li><code class="highlighter-rouge">brokerUrl</code> A url MqttClient 
connects to. Set this as the url of the Mqtt Server. e.g. 
tcp://localhost:1883.</li>
+  <li><code class="highlighter-rouge">storageLevel</code> By default it is 
used for storing incoming messages on disk.</li>
+  <li><code class="highlighter-rouge">topic</code> Topic MqttClient subscribes 
to.</li>
+  <li><code class="highlighter-rouge">topics</code> List of topics MqttClient 
subscribes to.</li>
+  <li><code class="highlighter-rouge">clientId</code> clientId, this client is 
assoicated with. Provide the same value to recover a stopped client.</li>
+  <li><code class="highlighter-rouge">QoS</code> The maximum quality of 
service to subscribe each topic at. Messages published at a lower quality of 
service will be received at the published QoS. Messages published at a higher 
quality of service will be received using the QoS specified on the 
subscribe.</li>
+  <li><code class="highlighter-rouge">username</code> Sets the user name to 
use for the connection to Mqtt Server. Do not set it, if server does not need 
this. Setting it empty will lead to errors.</li>
+  <li><code class="highlighter-rouge">password</code> Sets the password to use 
for the connection.</li>
+  <li><code class="highlighter-rouge">cleanSession</code> Setting it true 
starts a clean session, removes all checkpointed messages by a previous run of 
this source. This is set to false by default.</li>
+  <li><code class="highlighter-rouge">connectionTimeout</code> Sets the 
connection timeout, a value of 0 is interpreted as wait until client connects. 
See <code 
class="highlighter-rouge">MqttConnectOptions.setConnectionTimeout</code> for 
more information.</li>
+  <li><code class="highlighter-rouge">keepAlive</code> Same as <code 
class="highlighter-rouge">MqttConnectOptions.setKeepAliveInterval</code>.</li>
+  <li><code class="highlighter-rouge">mqttVersion</code> Same as <code 
class="highlighter-rouge">MqttConnectOptions.setMqttVersion</code>.</li>
+</ul>
+
+<h2 id="examples">Examples</h2>
+
+<h3 id="scala-api">Scala API</h3>
+
+<p>Create an input DStream of MQTT messages by passing the broker URL and topic (or topics) to <code class="highlighter-rouge">MQTTUtils.createStream(...)</code>:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>val lines = 
MQTTUtils.createStream(ssc, brokerUrl, topic)
+val lines = MQTTUtils.createPairedStream(ssc, brokerUrl, topic)
+val lines = MQTTUtils.createPairedByteArrayStream(ssc, brokerUrl, topic)
+</code></pre>
+</div>
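+
+<p>As a minimal sketch (the broker URL and topic below are placeholder values, and <code class="highlighter-rouge">ssc</code> is assumed to be an existing StreamingContext), a stream created this way can be consumed like any other DStream:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>import org.apache.spark.streaming.mqtt.MQTTUtils
+
+// Assumption: an MQTT broker is reachable at this placeholder address
+val brokerUrl = "tcp://localhost:1883"
+val topic = "sensors/temperature"
+
+// Each received MQTT message payload arrives as one String element
+val lines = MQTTUtils.createStream(ssc, brokerUrl, topic)
+
+// Example processing: count the messages received in each batch
+lines.count().print()
+
+ssc.start()
+ssc.awaitTermination()
+</code></pre>
+</div>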
+
+<p>Additional MQTT connection options can be provided:</p>
+
+<pre><code class="language-Scala">val lines = MQTTUtils.createStream(ssc, 
brokerUrl, topic, storageLevel, clientId, username, password, cleanSession, 
qos, connectionTimeout, keepAliveInterval, mqttVersion)
+val lines = MQTTUtils.createPairedStream(ssc, brokerUrl, topics, storageLevel, 
clientId, username, password, cleanSession, qos, connectionTimeout, 
keepAliveInterval, mqttVersion)
+val lines = MQTTUtils.createPairedByteArrayStream(ssc, brokerUrl, topics, 
storageLevel, clientId, username, password, cleanSession, qos, 
connectionTimeout, keepAliveInterval, mqttVersion)
+</code></pre>
+
+<h3 id="java-api">Java API</h3>
+
+<p>The Java API mirrors the Scala one: pass a <code class="highlighter-rouge">JavaStreamingContext</code>, the broker URL, and the topic (or topics) to <code class="highlighter-rouge">MQTTUtils.createStream(...)</code>:</p>
+
+<div class="highlighter-rouge"><pre 
class="highlight"><code>JavaDStream&lt;String&gt; lines = 
MQTTUtils.createStream(jssc, brokerUrl, topic);
+JavaReceiverInputDStream&lt;Tuple2&lt;String, String&gt;&gt; lines = 
MQTTUtils.createPairedStream(jssc, brokerUrl, topics);
+JavaReceiverInputDStream&lt;Tuple2&lt;String, String&gt;&gt; lines = 
MQTTUtils.createPairedByteArrayStream(jssc, brokerUrl, topics);
+</code></pre>
+</div>
+
+<p>See end-to-end examples at <a 
href="https://github.com/apache/bahir/tree/master/streaming-mqtt/examples";>MQTT 
Examples</a></p>
+
+<h3 id="python-api">Python API</h3>
+
+<p>Create a DStream from a single topic.</p>
+
+<pre><code class="language-Python">    MQTTUtils.createStream(ssc, broker_url, 
topic)
+</code></pre>
+
+<p>Create a DStream from a list of topics.</p>
+
+<pre><code class="language-Python">    MQTTUtils.createPairedStream(ssc, 
broker_url, topics)
+</code></pre>
+
+  </div>
+</div>
+
+
+
+      <hr>
+
+      <!-- <p>&copy; 2018 </p>-->
+      <footer class="site-footer">
+    <div class="wrapper">
+        <div class="footer-col-wrapper">
+            
+            <div style="text-align:center;">
+                
+                <div>
+                    Copyright &copy; 2016-2017 <a 
href="http://www.apache.org";>The Apache Software Foundation</a>.
+                    Licensed under the <a 
href="http://www.apache.org/licenses/LICENSE-2.0";>Apache License, Version
+                    2.0</a>.
+                    <br>
+                    
+                    Apache and the Apache Feather logo are trademarks of The 
Apache Software Foundation.
+                    
+                </div>
+            </div>
+        </div>
+    </div>
+</footer>
+
+    </div>
+
+    
+
+
+  <script type="text/javascript">
+  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new 
Date();a=s.createElement(o),
+  
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+  })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+
+  ga('create', 'UA-79140859-1', 'bahir.apache.org');
+  ga('require', 'linkid', 'linkid.js');
+  ga('send', 'pageview');
+
+</script>
+
+
+
+    <script 
src="/assets/themes/apache-clean/jquery/jquery-2.1.1.min.js"></script>
+
+    <script 
src="/assets/themes/apache-clean/bootstrap/js/bootstrap.min.js"></script>
+
+
+  </body>
+</html>
+

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/docs/spark/2.1.2/spark-streaming-pubsub/index.html
----------------------------------------------------------------------
diff --git a/content/docs/spark/2.1.2/spark-streaming-pubsub/index.html 
b/content/docs/spark/2.1.2/spark-streaming-pubsub/index.html
new file mode 100644
index 0000000..7fa3d68
--- /dev/null
+++ b/content/docs/spark/2.1.2/spark-streaming-pubsub/index.html
@@ -0,0 +1,335 @@
+
+
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <title>Spark Streaming Google Pub-Sub</title>
+    <meta name="description" content="Spark Streaming Google Pub-Sub">
+    <meta name="author" content="">
+
+    <!-- Enable responsive viewport -->
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+
+    <!-- Le HTML5 shim, for IE6-8 support of HTML elements -->
+    <!--[if lt IE 9]>
+      <script 
src="http://html5shim.googlecode.com/svn/trunk/html5.js";></script>
+    <![endif]-->
+
+    <!-- Le styles -->
+    <link href="/assets/themes/apache-clean/bootstrap/css/bootstrap.css" 
rel="stylesheet">
+    <link href="/assets/themes/apache-clean/css/style.css?body=1" 
rel="stylesheet" type="text/css">
+    <link href="/assets/themes/apache-clean/css/syntax.css" rel="stylesheet"  
type="text/css" media="screen" />
+    <!-- Le fav and touch icons -->
+    <!-- Update these with your own images
+    <link rel="shortcut icon" href="images/favicon.ico">
+    <link rel="apple-touch-icon" href="images/apple-touch-icon.png">
+    <link rel="apple-touch-icon" sizes="72x72" 
href="images/apple-touch-icon-72x72.png">
+    <link rel="apple-touch-icon" sizes="114x114" 
href="images/apple-touch-icon-114x114.png">
+  -->
+
+    <!-- make tables sortable by adding class tag "sortable" to table elements 
-->
+    <script 
src="http://www.kryogenix.org/code/browser/sorttable/sorttable.js";></script>
+
+
+  </head>
+
+  <body>
+
+    
+
+<!-- Navigation -->
+<div id="nav-bar">
+  <nav id="nav-container" class="navbar navbar-inverse " role="navigation">
+    <div class="container">
+      <!-- Brand and toggle get grouped for better mobile display -->
+
+      <div class="navbar-header page-scroll">
+        <button type="button" class="navbar-toggle" data-toggle="collapse" 
data-target=".navbar-collapse">
+          <span class="sr-only">Toggle navigation</span>
+          <span class="icon-bar"></span>
+          <span class="icon-bar"></span>
+          <span class="icon-bar"></span>
+        </button>
+        <a class="navbar-brand page-scroll" href="/#home">Home</a>
+      </div>
+      <!-- Collect the nav links, forms, and other content for toggling -->
+      <nav class="navbar-collapse collapse" role="navigation">
+        <ul class="nav navbar-nav">
+          
+          
+          
+          <li id="download">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Download<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/downloads/spark" target="_self">Bahir Spark 
Extensions</a></li>
+              
+              
+              <li><a href="/downloads/flink" target="_self">Bahir Flink 
Extensions</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="community">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Community<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/community" target="_self">Get Involved</a></li>
+              
+              
+              <li><a href="/contributing" target="_self">Contributing</a></li>
+              
+              
+              <li><a href="/contributing-extensions" 
target="_self">Contributing Extensions</a></li>
+              
+              
+              <li><a href="https://issues.apache.org/jira/browse/BAHIR"; 
target="_blank">Issue Tracker</a></li>
+              
+              
+              <li><a href="/community#source-code" target="_self">Source 
Code</a></li>
+              
+              
+              <li><a href="/community-members" target="_self">Project 
Committers</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="documentation">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Documentation<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/docs/spark/overview" target="_self">Bahir Spark 
Extensions</a></li>
+              
+              
+              <li><a href="/docs/flink/overview" target="_self">Bahir Flink 
Extensions</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="github">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">GitHub<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="https://github.com/apache/bahir"; 
target="_blank">Bahir Spark Extensions</a></li>
+              
+              
+              <li><a href="https://github.com/apache/bahir-flink"; 
target="_blank">Bahir Flink Extensions</a></li>
+              
+              
+              <li><a href="https://github.com/apache/bahir-website"; 
target="_blank">Bahir Website</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="apache">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Apache<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="http://www.apache.org/foundation/how-it-works.html"; 
target="_blank">Apache Software Foundation</a></li>
+              
+              
+              <li><a href="http://www.apache.org/licenses/"; 
target="_blank">Apache License</a></li>
+              
+              
+              <li><a href="http://www.apache.org/foundation/sponsorship"; 
target="_blank">Sponsorship</a></li>
+              
+              
+              <li><a href="http://www.apache.org/foundation/thanks.html"; 
target="_blank">Thanks</a></li>
+              
+              
+              <li><a href="/privacy-policy" target="_self">Privacy 
Policy</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+        </ul>
+      </nav><!--/.navbar-collapse -->
+      <!-- /.navbar-collapse -->
+    </div>
+    <!-- /.container -->
+  </nav>
+</div>
+
+
+    <div class="container">
+
+      
+
+<!--<div class="hero-unit Spark Streaming Google Pub-Sub">
+  <h1></h1>
+</div>
+-->
+
+<div class="row">
+  <div class="col-md-12">
+    <!--
+
+-->
+
+<p>A library for reading data from <a 
href="https://cloud.google.com/pubsub/";>Google Cloud Pub/Sub</a> using Spark 
Streaming.</p>
+
+<h2 id="linking">Linking</h2>
+
+<p>Using SBT:</p>
+
+<div class="highlighter-rouge"><pre 
class="highlight"><code>libraryDependencies += "org.apache.bahir" %% 
"spark-streaming-pubsub" % "2.1.2"
+</code></pre>
+</div>
+
+<p>Using Maven:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>&lt;dependency&gt;
+    &lt;groupId&gt;org.apache.bahir&lt;/groupId&gt;
+    &lt;artifactId&gt;spark-streaming-pubsub_2.11&lt;/artifactId&gt;
+    &lt;version&gt;2.1.2&lt;/version&gt;
+&lt;/dependency&gt;
+</code></pre>
+</div>
+
+<p>This library can also be added to Spark jobs launched through <code 
class="highlighter-rouge">spark-shell</code> or <code 
class="highlighter-rouge">spark-submit</code> by using the <code 
class="highlighter-rouge">--packages</code> command line option.
+For example, to include it when starting the spark shell:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>$ bin/spark-shell 
--packages org.apache.bahir:spark-streaming-pubsub_2.11:2.1.2
+</code></pre>
+</div>
+
+<p>Unlike using <code class="highlighter-rouge">--jars</code>, using <code 
class="highlighter-rouge">--packages</code> ensures that this library and its 
dependencies will be added to the classpath.
+The <code class="highlighter-rouge">--packages</code> argument can also be 
used with <code class="highlighter-rouge">bin/spark-submit</code>.</p>
+
+<h2 id="examples">Examples</h2>
+
+<p>First you need to create a credential with <code class="highlighter-rouge">SparkGCPCredentials</code>; it supports four types of credentials:</p>
+
+<ul>
+  <li>application default: <code class="highlighter-rouge">SparkGCPCredentials.builder.build()</code></li>
+  <li>JSON service account: <code class="highlighter-rouge">SparkGCPCredentials.builder.jsonServiceAccount(PATH_TO_JSON_KEY).build()</code></li>
+  <li>P12 service account: <code class="highlighter-rouge">SparkGCPCredentials.builder.p12ServiceAccount(PATH_TO_P12_KEY, EMAIL_ACCOUNT).build()</code></li>
+  <li>metadata service account (running on Dataproc): <code class="highlighter-rouge">SparkGCPCredentials.builder.metadataServiceAccount().build()</code></li>
+</ul>
+
+<h3 id="scala-api">Scala API</h3>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>val lines = 
PubsubUtils.createStream(ssc, projectId, subscriptionName, credential, ..)
+</code></pre>
+</div>
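+
+<p>For context, here is a minimal sketch (the project ID, subscription name, and batch interval are placeholder values, and the exact argument list is assumed from the call above) that combines the credential builder described earlier with <code class="highlighter-rouge">createStream</code>:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>import org.apache.spark.SparkConf
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.pubsub.{PubsubUtils, SparkGCPCredentials}
+
+// Assumption: placeholder project and subscription names
+val conf = new SparkConf().setAppName("PubsubExample").setMaster("local[2]")
+val ssc = new StreamingContext(conf, Seconds(10))
+
+// Application-default credentials; the other builder variants listed above
+// (jsonServiceAccount, p12ServiceAccount, metadataServiceAccount) plug in the same way
+val credential = SparkGCPCredentials.builder.build()
+
+val messages = PubsubUtils.createStream(
+  ssc, "my-gcp-project", "my-subscription", credential, StorageLevel.MEMORY_AND_DISK_SER_2)
+
+// Print each payload (assumes SparkPubsubMessage.getData returns the raw message bytes)
+messages.map(msg =&gt; new String(msg.getData())).print()
+
+ssc.start()
+ssc.awaitTermination()
+</code></pre>
+</div>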
+
+<h3 id="java-api">Java API</h3>
+
+<div class="highlighter-rouge"><pre 
class="highlight"><code>JavaDStream&lt;SparkPubsubMessage&gt; lines = 
PubsubUtils.createStream(jssc, projectId, subscriptionName, credential...) 
+</code></pre>
+</div>
+
+<p>See end-to-end examples at <a href="https://github.com/apache/bahir/tree/master/streaming-pubsub/examples">Google Cloud Pubsub Examples</a>.</p>
+
+<h3 id="unit-test">Unit Test</h3>
+
+<p>To run the PubSub test cases, you need to generate <strong>Google API service account key files</strong> and set the corresponding environment variables to enable the tests.</p>
+
+<h4 id="to-generate-a-service-account-key-file-with-pubsub-permission">To 
generate a service account key file with PubSub permission</h4>
+
+<ol>
+  <li>Go to the <a href="https://console.cloud.google.com">Google API Console</a></li>
+  <li>Choose the <code class="highlighter-rouge">Credentials</code> tab &gt; <code class="highlighter-rouge">Create credentials</code> button &gt; <code class="highlighter-rouge">Service account key</code></li>
+  <li>Fill in the account name, assign <code class="highlighter-rouge">Role &gt; Pub/Sub &gt; Pub/Sub Editor</code>, and check the option <code class="highlighter-rouge">Furnish a private key</code> to create one. You need to create one key in JSON format and another in P12 format.</li>
+  <li>The account email is the <code class="highlighter-rouge">Service account 
ID</code></li>
+</ol>
+
+<h4 id="setting-the-environment-variables-and-run-test">Setting the 
environment variables and run test</h4>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>mvn clean package 
-DskipTests -pl streaming-pubsub
+
+export ENABLE_PUBSUB_TESTS=1
+export GCP_TEST_ACCOUNT="THE_P12_SERVICE_ACCOUNT_ID_MENTIONED_ABOVE"
+export GCP_TEST_PROJECT_ID="YOUR_GCP_PROJECT_ID"
+export 
GCP_TEST_JSON_KEY_PATH=/path/to/pubsub/credential/files/Apache-Bahir-PubSub-1234abcd.json
+export 
GCP_TEST_P12_KEY_PATH=/path/to/pubsub/credential/files/Apache-Bahir-PubSub-5678efgh.p12
+
+mvn test -pl streaming-pubsub
+</code></pre>
+</div>
+
+  </div>
+</div>
+
+
+
+      <hr>
+
+      <!-- <p>&copy; 2018 </p>-->
+      <footer class="site-footer">
+    <div class="wrapper">
+        <div class="footer-col-wrapper">
+            
+            <div style="text-align:center;">
+                
+                <div>
+                    Copyright &copy; 2016-2017 <a 
href="http://www.apache.org";>The Apache Software Foundation</a>.
+                    Licensed under the <a 
href="http://www.apache.org/licenses/LICENSE-2.0";>Apache License, Version
+                    2.0</a>.
+                    <br>
+                    
+                    Apache and the Apache Feather logo are trademarks of The 
Apache Software Foundation.
+                    
+                </div>
+            </div>
+        </div>
+    </div>
+</footer>
+
+    </div>
+
+    
+
+
+  <script type="text/javascript">
+  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new 
Date();a=s.createElement(o),
+  
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+  })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+
+  ga('create', 'UA-79140859-1', 'bahir.apache.org');
+  ga('require', 'linkid', 'linkid.js');
+  ga('send', 'pageview');
+
+</script>
+
+
+
+    <script 
src="/assets/themes/apache-clean/jquery/jquery-2.1.1.min.js"></script>
+
+    <script 
src="/assets/themes/apache-clean/bootstrap/js/bootstrap.min.js"></script>
+
+
+  </body>
+</html>
+

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/docs/spark/2.1.2/spark-streaming-twitter/index.html
----------------------------------------------------------------------
diff --git a/content/docs/spark/2.1.2/spark-streaming-twitter/index.html 
b/content/docs/spark/2.1.2/spark-streaming-twitter/index.html
new file mode 100644
index 0000000..f364c1b
--- /dev/null
+++ b/content/docs/spark/2.1.2/spark-streaming-twitter/index.html
@@ -0,0 +1,308 @@
+
+
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <title>Spark Streaming Twitter</title>
+    <meta name="description" content="Spark Streaming Twitter">
+    <meta name="author" content="">
+
+    <!-- Enable responsive viewport -->
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+
+    <!-- Le HTML5 shim, for IE6-8 support of HTML elements -->
+    <!--[if lt IE 9]>
+      <script 
src="http://html5shim.googlecode.com/svn/trunk/html5.js";></script>
+    <![endif]-->
+
+    <!-- Le styles -->
+    <link href="/assets/themes/apache-clean/bootstrap/css/bootstrap.css" 
rel="stylesheet">
+    <link href="/assets/themes/apache-clean/css/style.css?body=1" 
rel="stylesheet" type="text/css">
+    <link href="/assets/themes/apache-clean/css/syntax.css" rel="stylesheet"  
type="text/css" media="screen" />
+    <!-- Le fav and touch icons -->
+    <!-- Update these with your own images
+    <link rel="shortcut icon" href="images/favicon.ico">
+    <link rel="apple-touch-icon" href="images/apple-touch-icon.png">
+    <link rel="apple-touch-icon" sizes="72x72" 
href="images/apple-touch-icon-72x72.png">
+    <link rel="apple-touch-icon" sizes="114x114" 
href="images/apple-touch-icon-114x114.png">
+  -->
+
+    <!-- make tables sortable by adding class tag "sortable" to table elements 
-->
+    <script 
src="http://www.kryogenix.org/code/browser/sorttable/sorttable.js";></script>
+
+
+  </head>
+
+  <body>
+
+    
+
+<!-- Navigation -->
+<div id="nav-bar">
+  <nav id="nav-container" class="navbar navbar-inverse " role="navigation">
+    <div class="container">
+      <!-- Brand and toggle get grouped for better mobile display -->
+
+      <div class="navbar-header page-scroll">
+        <button type="button" class="navbar-toggle" data-toggle="collapse" 
data-target=".navbar-collapse">
+          <span class="sr-only">Toggle navigation</span>
+          <span class="icon-bar"></span>
+          <span class="icon-bar"></span>
+          <span class="icon-bar"></span>
+        </button>
+        <a class="navbar-brand page-scroll" href="/#home">Home</a>
+      </div>
+      <!-- Collect the nav links, forms, and other content for toggling -->
+      <nav class="navbar-collapse collapse" role="navigation">
+        <ul class="nav navbar-nav">
+          
+          
+          
+          <li id="download">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Download<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/downloads/spark" target="_self">Bahir Spark 
Extensions</a></li>
+              
+              
+              <li><a href="/downloads/flink" target="_self">Bahir Flink 
Extensions</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="community">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Community<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/community" target="_self">Get Involved</a></li>
+              
+              
+              <li><a href="/contributing" target="_self">Contributing</a></li>
+              
+              
+              <li><a href="/contributing-extensions" 
target="_self">Contributing Extensions</a></li>
+              
+              
+              <li><a href="https://issues.apache.org/jira/browse/BAHIR"; 
target="_blank">Issue Tracker</a></li>
+              
+              
+              <li><a href="/community#source-code" target="_self">Source 
Code</a></li>
+              
+              
+              <li><a href="/community-members" target="_self">Project 
Committers</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="documentation">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Documentation<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/docs/spark/overview" target="_self">Bahir Spark 
Extensions</a></li>
+              
+              
+              <li><a href="/docs/flink/overview" target="_self">Bahir Flink 
Extensions</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="github">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">GitHub<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="https://github.com/apache/bahir"; 
target="_blank">Bahir Spark Extensions</a></li>
+              
+              
+              <li><a href="https://github.com/apache/bahir-flink"; 
target="_blank">Bahir Flink Extensions</a></li>
+              
+              
+              <li><a href="https://github.com/apache/bahir-website"; 
target="_blank">Bahir Website</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="apache">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Apache<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="http://www.apache.org/foundation/how-it-works.html"; 
target="_blank">Apache Software Foundation</a></li>
+              
+              
+              <li><a href="http://www.apache.org/licenses/"; 
target="_blank">Apache License</a></li>
+              
+              
+              <li><a href="http://www.apache.org/foundation/sponsorship"; 
target="_blank">Sponsorship</a></li>
+              
+              
+              <li><a href="http://www.apache.org/foundation/thanks.html"; 
target="_blank">Thanks</a></li>
+              
+              
+              <li><a href="/privacy-policy" target="_self">Privacy 
Policy</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+        </ul>
+      </nav><!--/.navbar-collapse -->
+      <!-- /.navbar-collapse -->
+    </div>
+    <!-- /.container -->
+  </nav>
+</div>
+
+
+    <div class="container">
+
+      
+
+<!--<div class="hero-unit Spark Streaming Twitter">
+  <h1></h1>
+</div>
+-->
+
+<div class="row">
+  <div class="col-md-12">
+    <!--
+
+-->
+
+<p>A library for reading social data from <a 
href="http://twitter.com/";>twitter</a> using Spark Streaming.</p>
+
+<h2 id="linking">Linking</h2>
+
+<p>Using SBT:</p>
+
+<div class="highlighter-rouge"><pre 
class="highlight"><code>libraryDependencies += "org.apache.bahir" %% 
"spark-streaming-twitter" % "2.1.2"
+</code></pre>
+</div>
+
+<p>Using Maven:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>&lt;dependency&gt;
+    &lt;groupId&gt;org.apache.bahir&lt;/groupId&gt;
+    &lt;artifactId&gt;spark-streaming-twitter_2.11&lt;/artifactId&gt;
+    &lt;version&gt;2.1.2&lt;/version&gt;
+&lt;/dependency&gt;
+</code></pre>
+</div>
+
+<p>This library can also be added to Spark jobs launched through <code 
class="highlighter-rouge">spark-shell</code> or <code 
class="highlighter-rouge">spark-submit</code> by using the <code 
class="highlighter-rouge">--packages</code> command line option.
+For example, to include it when starting the spark shell:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>$ bin/spark-shell 
--packages org.apache.bahir:spark-streaming-twitter_2.11:2.1.2
+</code></pre>
+</div>
+
+<p>Unlike using <code class="highlighter-rouge">--jars</code>, using <code 
class="highlighter-rouge">--packages</code> ensures that this library and its 
dependencies will be added to the classpath.
+The <code class="highlighter-rouge">--packages</code> argument can also be 
used with <code class="highlighter-rouge">bin/spark-submit</code>.</p>
+
+<p>This library is cross-published for Scala 2.10 and Scala 2.11, so users should use the appropriate Scala version (2.10 or 2.11) in the commands listed above.</p>
+
+<h2 id="examples">Examples</h2>
+
+<p><code class="highlighter-rouge">TwitterUtils</code> uses Twitter4j to get 
the public stream of tweets using <a 
href="https://dev.twitter.com/docs/streaming-apis";>Twitter’s Streaming 
API</a>. Authentication information
+can be provided by any of the <a 
href="http://twitter4j.org/en/configuration.html";>methods</a> supported by 
Twitter4J library. You can import the <code 
class="highlighter-rouge">TwitterUtils</code> class and create a DStream with 
<code class="highlighter-rouge">TwitterUtils.createStream</code> as shown 
below.</p>
+
+<h3 id="scala-api">Scala API</h3>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>import 
org.apache.spark.streaming.twitter._
+
+TwitterUtils.createStream(ssc, None)
+</code></pre>
+</div>
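+
+<p>As a slightly fuller sketch (the OAuth values and keywords are placeholders, and <code class="highlighter-rouge">ssc</code> is assumed to be an existing StreamingContext), credentials can be supplied through Twitter4J system properties and the stream restricted by keywords:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>import org.apache.spark.streaming.twitter._
+
+// Assumption: placeholder OAuth credentials; any configuration method
+// supported by Twitter4J (e.g. twitter4j.properties) works equally well
+System.setProperty("twitter4j.oauth.consumerKey", "CONSUMER_KEY")
+System.setProperty("twitter4j.oauth.consumerSecret", "CONSUMER_SECRET")
+System.setProperty("twitter4j.oauth.accessToken", "ACCESS_TOKEN")
+System.setProperty("twitter4j.oauth.accessTokenSecret", "ACCESS_TOKEN_SECRET")
+
+// None lets Twitter4J build the OAuth authorization from the properties above;
+// the keyword list filters the stream to matching tweets
+val filters = Seq("spark", "bahir")
+val tweets = TwitterUtils.createStream(ssc, None, filters)
+
+tweets.map(_.getText).print()
+</code></pre>
+</div>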
+
+<h3 id="java-api">Java API</h3>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>import 
org.apache.spark.streaming.twitter.*;
+
+TwitterUtils.createStream(jssc);
+</code></pre>
+</div>
+
+<p>You can either get the full public stream or a stream filtered by keywords.
+See end-to-end examples at <a href="https://github.com/apache/bahir/tree/master/streaming-twitter/examples">Twitter Examples</a>.</p>
+
+  </div>
+</div>
+
+
+
+      <hr>
+
+      <!-- <p>&copy; 2018 </p>-->
+      <footer class="site-footer">
+    <div class="wrapper">
+        <div class="footer-col-wrapper">
+            
+            <div style="text-align:center;">
+                
+                <div>
+                    Copyright &copy; 2016-2017 <a 
href="http://www.apache.org";>The Apache Software Foundation</a>.
+                    Licensed under the <a 
href="http://www.apache.org/licenses/LICENSE-2.0";>Apache License, Version
+                    2.0</a>.
+                    <br>
+                    
+                    Apache and the Apache Feather logo are trademarks of The 
Apache Software Foundation.
+                    
+                </div>
+            </div>
+        </div>
+    </div>
+</footer>
+
+    </div>
+
+    
+
+
+  <script type="text/javascript">
+  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new 
Date();a=s.createElement(o),
+  
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+  })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+
+  ga('create', 'UA-79140859-1', 'bahir.apache.org');
+  ga('require', 'linkid', 'linkid.js');
+  ga('send', 'pageview');
+
+</script>
+
+
+
+    <script 
src="/assets/themes/apache-clean/jquery/jquery-2.1.1.min.js"></script>
+
+    <script 
src="/assets/themes/apache-clean/bootstrap/js/bootstrap.min.js"></script>
+
+
+  </body>
+</html>
+

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/docs/spark/2.1.2/spark-streaming-zeromq/index.html
----------------------------------------------------------------------
diff --git a/content/docs/spark/2.1.2/spark-streaming-zeromq/index.html 
b/content/docs/spark/2.1.2/spark-streaming-zeromq/index.html
new file mode 100644
index 0000000..4a6a193
--- /dev/null
+++ b/content/docs/spark/2.1.2/spark-streaming-zeromq/index.html
@@ -0,0 +1,300 @@
+
+
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <title>Spark Streaming ZeroMQ</title>
+    <meta name="description" content="Spark Streaming ZeroMQ">
+    <meta name="author" content="">
+
+    <!-- Enable responsive viewport -->
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+
+    <!-- Le HTML5 shim, for IE6-8 support of HTML elements -->
+    <!--[if lt IE 9]>
+      <script 
src="http://html5shim.googlecode.com/svn/trunk/html5.js";></script>
+    <![endif]-->
+
+    <!-- Le styles -->
+    <link href="/assets/themes/apache-clean/bootstrap/css/bootstrap.css" 
rel="stylesheet">
+    <link href="/assets/themes/apache-clean/css/style.css?body=1" 
rel="stylesheet" type="text/css">
+    <link href="/assets/themes/apache-clean/css/syntax.css" rel="stylesheet"  
type="text/css" media="screen" />
+    <!-- Le fav and touch icons -->
+    <!-- Update these with your own images
+    <link rel="shortcut icon" href="images/favicon.ico">
+    <link rel="apple-touch-icon" href="images/apple-touch-icon.png">
+    <link rel="apple-touch-icon" sizes="72x72" 
href="images/apple-touch-icon-72x72.png">
+    <link rel="apple-touch-icon" sizes="114x114" 
href="images/apple-touch-icon-114x114.png">
+  -->
+
+    <!-- make tables sortable by adding class tag "sortable" to table elements 
-->
+    <script 
src="http://www.kryogenix.org/code/browser/sorttable/sorttable.js";></script>
+
+
+  </head>
+
+  <body>
+
+    
+
+<!-- Navigation -->
+<div id="nav-bar">
+  <nav id="nav-container" class="navbar navbar-inverse " role="navigation">
+    <div class="container">
+      <!-- Brand and toggle get grouped for better mobile display -->
+
+      <div class="navbar-header page-scroll">
+        <button type="button" class="navbar-toggle" data-toggle="collapse" 
data-target=".navbar-collapse">
+          <span class="sr-only">Toggle navigation</span>
+          <span class="icon-bar"></span>
+          <span class="icon-bar"></span>
+          <span class="icon-bar"></span>
+        </button>
+        <a class="navbar-brand page-scroll" href="/#home">Home</a>
+      </div>
+      <!-- Collect the nav links, forms, and other content for toggling -->
+      <nav class="navbar-collapse collapse" role="navigation">
+        <ul class="nav navbar-nav">
+          
+          
+          
+          <li id="download">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Download<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/downloads/spark" target="_self">Bahir Spark 
Extensions</a></li>
+              
+              
+              <li><a href="/downloads/flink" target="_self">Bahir Flink 
Extensions</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="community">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Community<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/community" target="_self">Get Involved</a></li>
+              
+              
+              <li><a href="/contributing" target="_self">Contributing</a></li>
+              
+              
+              <li><a href="/contributing-extensions" 
target="_self">Contributing Extensions</a></li>
+              
+              
+              <li><a href="https://issues.apache.org/jira/browse/BAHIR"; 
target="_blank">Issue Tracker</a></li>
+              
+              
+              <li><a href="/community#source-code" target="_self">Source 
Code</a></li>
+              
+              
+              <li><a href="/community-members" target="_self">Project 
Committers</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="documentation">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Documentation<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="/docs/spark/overview" target="_self">Bahir Spark 
Extensions</a></li>
+              
+              
+              <li><a href="/docs/flink/overview" target="_self">Bahir Flink 
Extensions</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="github">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">GitHub<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="https://github.com/apache/bahir"; 
target="_blank">Bahir Spark Extensions</a></li>
+              
+              
+              <li><a href="https://github.com/apache/bahir-flink"; 
target="_blank">Bahir Flink Extensions</a></li>
+              
+              
+              <li><a href="https://github.com/apache/bahir-website"; 
target="_blank">Bahir Website</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+          
+          
+          <li id="apache">
+            
+            <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Apache<b class="caret"></b></a>
+            <ul class="dropdown-menu dropdown-left">
+              
+              
+              <li><a href="http://www.apache.org/foundation/how-it-works.html"; 
target="_blank">Apache Software Foundation</a></li>
+              
+              
+              <li><a href="http://www.apache.org/licenses/"; 
target="_blank">Apache License</a></li>
+              
+              
+              <li><a href="http://www.apache.org/foundation/sponsorship"; 
target="_blank">Sponsorship</a></li>
+              
+              
+              <li><a href="http://www.apache.org/foundation/thanks.html"; 
target="_blank">Thanks</a></li>
+              
+              
+              <li><a href="/privacy-policy" target="_self">Privacy 
Policy</a></li>
+              
+            </ul>
+            
+          </li>
+          
+          
+        </ul>
+      </nav><!--/.navbar-collapse -->
+      <!-- /.navbar-collapse -->
+    </div>
+    <!-- /.container -->
+  </nav>
+</div>
+
+
+    <div class="container">
+
+      
+
+<!--<div class="hero-unit Spark Streaming ZeroMQ">
+  <h1></h1>
+</div>
+-->
+
+<div class="row">
+  <div class="col-md-12">
+    <!--
+
+-->
+
+<p>A library for reading data from <a href="http://zeromq.org/";>ZeroMQ</a> 
using Spark Streaming.</p>
+
+<h2 id="linking">Linking</h2>
+
+<p>Using SBT:</p>
+
+<div class="highlighter-rouge"><pre 
class="highlight"><code>libraryDependencies += "org.apache.bahir" %% 
"spark-streaming-zeromq" % "2.1.2"
+</code></pre>
+</div>
+
+<p>Using Maven:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>&lt;dependency&gt;
+    &lt;groupId&gt;org.apache.bahir&lt;/groupId&gt;
+    &lt;artifactId&gt;spark-streaming-zeromq_2.11&lt;/artifactId&gt;
+    &lt;version&gt;2.1.2&lt;/version&gt;
+&lt;/dependency&gt;
+</code></pre>
+</div>
+
+<p>This library can also be added to Spark jobs launched through <code 
class="highlighter-rouge">spark-shell</code> or <code 
class="highlighter-rouge">spark-submit</code> by using the <code 
class="highlighter-rouge">--packages</code> command line option.
+For example, to include it when starting the spark shell:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>$ bin/spark-shell 
--packages org.apache.bahir:spark-streaming-zeromq_2.11:2.1.2
+</code></pre>
+</div>
+
+<p>Unlike using <code class="highlighter-rouge">--jars</code>, using <code 
class="highlighter-rouge">--packages</code> ensures that this library and its 
dependencies will be added to the classpath.
+The <code class="highlighter-rouge">--packages</code> argument can also be 
used with <code class="highlighter-rouge">bin/spark-submit</code>.</p>
+
+<p>This library is cross-published for Scala 2.10 and Scala 2.11, so users 
should use the proper Scala version (2.10 or 2.11) in the commands listed 
above.</p>
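+
+<p>For example, with a Scala 2.10 build of Spark the command above would 
(assuming the same artifact naming scheme) become:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>$ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.10:2.1.2
+</code></pre>
+</div>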
+
+<h2 id="examples">Examples</h2>
+
+<h3 id="scala-api">Scala API</h3>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>val lines = 
ZeroMQUtils.createStream(ssc, ...)
+</code></pre>
+</div>
+
+<h3 id="java-api">Java API</h3>
+
+<div class="highlighter-rouge"><pre 
class="highlight"><code>JavaDStream&lt;String&gt; lines = 
ZeroMQUtils.createStream(jssc, ...);
+</code></pre>
+</div>
+
+<p>See end-to-end examples at <a 
href="https://github.com/apache/bahir/tree/master/streaming-zeromq/examples";>ZeroMQ
 Examples</a></p>
+
+  </div>
+</div>
+
+
+
+      <hr>
+
+      <!-- <p>&copy; 2018 </p>-->
+      <footer class="site-footer">
+    <div class="wrapper">
+        <div class="footer-col-wrapper">
+            
+            <div style="text-align:center;">
+                
+                <div>
+                    Copyright &copy; 2016-2017 <a 
href="http://www.apache.org";>The Apache Software Foundation</a>.
+                    Licensed under the <a 
href="http://www.apache.org/licenses/LICENSE-2.0";>Apache License, Version
+                    2.0</a>.
+                    <br>
+                    
+                    Apache and the Apache Feather logo are trademarks of The 
Apache Software Foundation.
+                    
+                </div>
+            </div>
+        </div>
+    </div>
+</footer>
+
+    </div>
+
+    
+
+
+  <script type="text/javascript">
+  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new 
Date();a=s.createElement(o),
+  
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+  })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+
+  ga('create', 'UA-79140859-1', 'bahir.apache.org');
+  ga('require', 'linkid', 'linkid.js');
+  ga('send', 'pageview');
+
+</script>
+
+
+
+    <script 
src="/assets/themes/apache-clean/jquery/jquery-2.1.1.min.js"></script>
+
+    <script 
src="/assets/themes/apache-clean/bootstrap/js/bootstrap.min.js"></script>
+
+
+  </body>
+</html>
+

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/docs/spark/current/spark-sql-cloudant/index.html
----------------------------------------------------------------------
diff --git a/content/docs/spark/current/spark-sql-cloudant/index.html 
b/content/docs/spark/current/spark-sql-cloudant/index.html
index 65af1bf..78bb109 100644
--- a/content/docs/spark/current/spark-sql-cloudant/index.html
+++ b/content/docs/spark/current/spark-sql-cloudant/index.html
@@ -234,13 +234,13 @@ The <code class="highlighter-rouge">--packages</code> 
argument can also be used
 
 <p>Submit a job in Python:</p>
 
-<div class="highlighter-rouge"><pre class="highlight"><code>spark-submit  
--master local[4] --jars &lt;path to cloudant-spark.jar&gt;  &lt;path to python 
script&gt; 
+<div class="highlighter-rouge"><pre class="highlight"><code>spark-submit  
--master local[4] --packages 
org.apache.bahir:spark-sql-cloudant_2.11:2.3.0-SNAPSHOT  &lt;path to python 
script&gt;
 </code></pre>
 </div>
 
 <p>Submit a job in Scala:</p>
 
-<div class="highlighter-rouge"><pre class="highlight"><code>spark-submit 
--class "&lt;your class&gt;" --master local[4] --jars &lt;path to 
cloudant-spark.jar&gt; &lt;path to your app jar&gt;
+<div class="highlighter-rouge"><pre class="highlight"><code>spark-submit 
--class "&lt;your class&gt;" --master local[4] --packages 
org.apache.bahir:spark-sql-cloudant_2.11:2.3.0-SNAPSHOT &lt;path to 
spark-sql-cloudant jar&gt;
 </code></pre>
 </div>
 
@@ -259,7 +259,7 @@ The <code class="highlighter-rouge">--packages</code> 
argument can also be used
 <p>Here each subsequent configuration overrides the previous one. Thus, 
configuration set using a DataFrame option overrides what has been set in 
SparkConf, and configuration passed to spark-submit using --conf takes 
precedence over any setting in the code.</p>
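+
+<p>As a minimal sketch of this precedence (the <code 
class="highlighter-rouge">org.apache.bahir.cloudant</code> source name and the 
database name below are illustrative assumptions, not taken from this page):</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>val conf = new SparkConf().set("cloudant.protocol", "http")
+val spark = SparkSession.builder().config(conf).getOrCreate()
+// the DataFrame option below overrides the SparkConf value for this read
+val df = spark.read.format("org.apache.bahir.cloudant")
+  .option("cloudant.protocol", "https")
+  .load("your_database")
+</code></pre>
+</div>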
 
 <h3 id="configuration-in-applicationconf">Configuration in 
application.conf</h3>
-<p>Default values are defined in <a 
href="cloudant-spark-sql/src/main/resources/application.conf">here</a>.</p>
+<p>Default values are defined <a 
href="src/main/resources/application.conf">here</a>.</p>
 
 <h3 id="configuration-on-sparkconf">Configuration on SparkConf</h3>
 
@@ -273,6 +273,16 @@ The <code class="highlighter-rouge">--packages</code> 
argument can also be used
   </thead>
   <tbody>
     <tr>
+      <td>cloudant.batchInterval</td>
+      <td style="text-align: center">8</td>
+      <td>number of seconds for the batch interval used when streaming all 
documents from the <code class="highlighter-rouge">_changes</code> endpoint into 
a Spark DataFrame.  See <a 
href="https://spark.apache.org/docs/latest/streaming-programming-guide.html#setting-the-right-batch-interval";>Setting
 the right batch interval</a> for tuning this value.</td>
+    </tr>
+    <tr>
+      <td>cloudant.endpoint</td>
+      <td style="text-align: center"><code 
class="highlighter-rouge">_all_docs</code></td>
+      <td>endpoint for RelationProvider when loading data from Cloudant to 
DataFrames or SQL temporary tables. Select between the Cloudant <code 
class="highlighter-rouge">_all_docs</code> or <code 
class="highlighter-rouge">_changes</code> API endpoint.  See 
<strong>Note</strong> below for differences between endpoints.</td>
+    </tr>
+    <tr>
       <td>cloudant.protocol</td>
       <td style="text-align: center">https</td>
       <td>protocol to use to transfer data: http or https</td>
@@ -293,19 +303,34 @@ The <code class="highlighter-rouge">--packages</code> 
argument can also be used
       <td>cloudant password</td>
     </tr>
     <tr>
+      <td>cloudant.numberOfRetries</td>
+      <td style="text-align: center">3</td>
+      <td>number of times to replay a request that received a 429 <code 
class="highlighter-rouge">Too Many Requests</code> response</td>
+    </tr>
+    <tr>
       <td>cloudant.useQuery</td>
       <td style="text-align: center">false</td>
-      <td>By default, _all_docs endpoint is used if configuration ‘view’ 
and ‘index’ (see below) are not set. When useQuery is enabled, _find 
endpoint will be used in place of _all_docs when query condition is not on 
primary key field (_id), so that query predicates may be driven into 
datastore.</td>
+      <td>by default, the <code class="highlighter-rouge">_all_docs</code> 
endpoint is used if the ‘view’ and ‘index’ configurations (see below) are 
not set. When useQuery is enabled, the <code class="highlighter-rouge">_find</code> 
endpoint will be used in place of <code 
class="highlighter-rouge">_all_docs</code> when the query condition is not on 
the primary key field (_id), so that query predicates may be driven into the 
datastore.</td>
     </tr>
     <tr>
       <td>cloudant.queryLimit</td>
       <td style="text-align: center">25</td>
-      <td>The maximum number of results returned when querying the _find 
endpoint.</td>
+      <td>the maximum number of results returned when querying the <code 
class="highlighter-rouge">_find</code> endpoint.</td>
+    </tr>
+    <tr>
+      <td>cloudant.storageLevel</td>
+      <td style="text-align: center">MEMORY_ONLY</td>
+      <td>the storage level for persisting Spark RDDs during load when <code 
class="highlighter-rouge">cloudant.endpoint</code> is set to <code 
class="highlighter-rouge">_changes</code>.  See <a 
href="https://spark.apache.org/docs/latest/programming-guide.html#rdd-persistence";>RDD
 Persistence section</a> in Spark’s Programming Guide for all available 
storage level options.</td>
+    </tr>
+    <tr>
+      <td>cloudant.timeout</td>
+      <td style="text-align: center">60000</td>
+      <td>stop the response after waiting the defined number of milliseconds 
for data.  Only supported with <code class="highlighter-rouge">changes</code> 
endpoint.</td>
     </tr>
     <tr>
       <td>jsonstore.rdd.partitions</td>
       <td style="text-align: center">10</td>
-      <td>the number of partitions intent used to drive JsonStoreRDD loading 
query result in parallel. The actual number is calculated based on total rows 
returned and satisfying maxInPartition and minInPartition</td>
+      <td>the number of partitions intended to be used to drive JsonStoreRDD 
loading of query results in parallel. The actual number is calculated based on 
the total rows returned while satisfying maxInPartition and minInPartition. 
Only supported with <code class="highlighter-rouge">_all_docs</code> endpoint.</td>
     </tr>
     <tr>
       <td>jsonstore.rdd.maxInPartition</td>
@@ -329,17 +354,39 @@ The <code class="highlighter-rouge">--packages</code> 
argument can also be used
     </tr>
     <tr>
       <td>schemaSampleSize</td>
-      <td style="text-align: center">“-1”</td>
-      <td>the sample size for RDD schema discovery. 1 means we are using only 
first document for schema discovery; -1 means all documents; 0 will be treated 
as 1; any number N means min(N, total) docs</td>
+      <td style="text-align: center">-1</td>
+      <td>the sample size for RDD schema discovery. 1 means we are using only 
the first document for schema discovery; -1 means all documents; 0 will be 
treated as 1; any number N means min(N, total) docs. Only supported with <code 
class="highlighter-rouge">_all_docs</code> endpoint.</td>
     </tr>
     <tr>
       <td>createDBOnSave</td>
-      <td style="text-align: center">“false”</td>
+      <td style="text-align: center">false</td>
       <td>whether to create a new database during save operation. If false, a 
database should already exist. If true, a new database will be created. If 
true, and a database with a provided name already exists, an error will be 
raised.</td>
     </tr>
   </tbody>
 </table>
 
+<p>The <code class="highlighter-rouge">cloudant.endpoint</code> option sets the 
<code class="highlighter-rouge">_changes</code> or <code 
class="highlighter-rouge">_all_docs</code> API endpoint to be called while 
loading Cloudant data into Spark DataFrames or SQL tables.</p>
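+
+<p>For example, a DataFrame load against the <code 
class="highlighter-rouge">_changes</code> endpoint might look like the 
following sketch (the <code class="highlighter-rouge">org.apache.bahir.cloudant</code> 
source name and database name are illustrative assumptions):</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>val df = spark.read.format("org.apache.bahir.cloudant")
+  .option("cloudant.endpoint", "_changes")
+  .load("your_database")
+</code></pre>
+</div>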
+
+<p><strong>Note:</strong> When using the <code 
class="highlighter-rouge">_changes</code> API, please consider: 
+1. Results are partially ordered and may not be presented in the order in 
+which documents were updated.
+2. In case of shard unavailability, you may see duplicate results (changes 
that have already been seen).
+3. The <code class="highlighter-rouge">selector</code> option can be used to filter 
Cloudant docs during load.
+4. Supports a real snapshot of the database, representing it at a single 
point in time.
+5. Only supports a single partition.</p>
+
+<p>When using the <code class="highlighter-rouge">_all_docs</code> API:
+1. Supports parallel reads (using offset and range) and partitioning.
+2. Using partitions may not represent the true snapshot of a database.  Some 
docs
+   may be added or deleted in the database between loading data into different 
+   Spark partitions.</p>
+
+<p>If loading Cloudant docs from a database greater than 100 MB, set <code 
class="highlighter-rouge">cloudant.endpoint</code> to <code 
class="highlighter-rouge">_changes</code> and <code 
class="highlighter-rouge">spark.streaming.unpersist</code> to <code 
class="highlighter-rouge">false</code>.
+This will enable RDD persistence during load against <code 
class="highlighter-rouge">_changes</code> endpoint and allow the persisted RDDs 
to be accessible after streaming completes.</p>
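+
+<p>A minimal SparkConf sketch for this case, using the settings named above:</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>val conf = new SparkConf()
+  .set("cloudant.endpoint", "_changes")
+  .set("spark.streaming.unpersist", "false")
+</code></pre>
+</div>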
+
+<p>See <a 
href="src/test/scala/org/apache/bahir/cloudant/CloudantChangesDFSuite.scala">CloudantChangesDFSuite</a>
 
+for examples of loading data into a Spark DataFrame with <code 
class="highlighter-rouge">_changes</code> API.</p>
+
 <h3 id="configuration-on-spark-sql-temporary-table-or-dataframe">Configuration 
on Spark SQL Temporary Table or DataFrame</h3>
 
 <p>Besides all the configurations passed to a temporary table or DataFrame 
through SparkConf, it is also possible to set the following configurations on a 
temporary table or DataFrame using OPTIONS (see the sketch after the table):</p>
@@ -354,39 +401,44 @@ The <code class="highlighter-rouge">--packages</code> 
argument can also be used
   </thead>
   <tbody>
     <tr>
-      <td>database</td>
-      <td style="text-align: center"> </td>
-      <td>cloudant database name</td>
+      <td>bulkSize</td>
+      <td style="text-align: center">200</td>
+      <td>the bulk save size</td>
     </tr>
     <tr>
-      <td>view</td>
+      <td>createDBOnSave</td>
+      <td style="text-align: center">false</td>
+      <td>whether to create a new database during save operation. If false, a 
database should already exist. If true, a new database will be created. If 
true, and a database with a provided name already exists, an error will be 
raised.</td>
+    </tr>
+    <tr>
+      <td>database</td>
       <td style="text-align: center"> </td>
-      <td>cloudant view w/o the database name. only used for load.</td>
+      <td>Cloudant database name</td>
     </tr>
     <tr>
       <td>index</td>
       <td style="text-align: center"> </td>
-      <td>cloudant search index w/o the database name. only used for load data 
with less than or equal to 200 results.</td>
+      <td>Cloudant Search index without the database name. Search index 
queries are limited to returning 200 results, so this can only be used to load 
data with &lt;= 200 results.</td>
     </tr>
     <tr>
       <td>path</td>
       <td style="text-align: center"> </td>
-      <td>cloudant: as database name if database is not present</td>
+      <td>Cloudant: as database name if database is not present</td>
     </tr>
     <tr>
       <td>schemaSampleSize</td>
-      <td style="text-align: center">“-1”</td>
+      <td style="text-align: center">-1</td>
       <td>the sample size used to discover the schema for this temp table. -1 
scans all documents</td>
     </tr>
     <tr>
-      <td>bulkSize</td>
-      <td style="text-align: center">200</td>
-      <td>the bulk save size</td>
+      <td>selector</td>
+      <td style="text-align: center">all documents</td>
+      <td>a selector written in Cloudant Query syntax, specifying conditions 
for selecting documents when the <code 
class="highlighter-rouge">cloudant.endpoint</code> option is set to <code 
class="highlighter-rouge">_changes</code>. Only documents satisfying the 
selector’s conditions will be retrieved from Cloudant and loaded into 
Spark.</td>
     </tr>
     <tr>
-      <td>createDBOnSave</td>
-      <td style="text-align: center">“false”</td>
-      <td>whether to create a new database during save operation. If false, a 
database should already exist. If true, a new database will be created. If 
true, and a database with a provided name already exists, an error will be 
raised.</td>
+      <td>view</td>
+      <td style="text-align: center"> </td>
+      <td>Cloudant view without the database name. Only used for load.</td>
     </tr>
   </tbody>
 </table>
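+
+<p>As an illustrative sketch of passing these OPTIONS when registering a 
temporary view (the <code class="highlighter-rouge">org.apache.bahir.cloudant</code> 
source name, view name, and database name are assumptions, not taken from this 
page):</p>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>spark.sql("CREATE TEMPORARY VIEW airport_view USING org.apache.bahir.cloudant OPTIONS (database 'your_database', schemaSampleSize '100')")
+</code></pre>
+</div>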

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/docs/spark/current/spark-sql-streaming-mqtt/index.html
----------------------------------------------------------------------
diff --git a/content/docs/spark/current/spark-sql-streaming-mqtt/index.html 
b/content/docs/spark/current/spark-sql-streaming-mqtt/index.html
index b6922f5..5d2f547 100644
--- a/content/docs/spark/current/spark-sql-streaming-mqtt/index.html
+++ b/content/docs/spark/current/spark-sql-streaming-mqtt/index.html
@@ -331,6 +331,7 @@ query.awaitTermination();
 
 <p>Please see <code 
class="highlighter-rouge">JavaMQTTStreamWordCount.java</code> for full 
example.</p>
 
+
   </div>
 </div>
 

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/docs/spark/current/spark-streaming-mqtt/index.html
----------------------------------------------------------------------
diff --git a/content/docs/spark/current/spark-streaming-mqtt/index.html 
b/content/docs/spark/current/spark-streaming-mqtt/index.html
index d4d3ddf..b20070a 100644
--- a/content/docs/spark/current/spark-streaming-mqtt/index.html
+++ b/content/docs/spark/current/spark-streaming-mqtt/index.html
@@ -255,6 +255,7 @@ this actor can be configured to handle failures, etc.</p>
 
 <div class="highlighter-rouge"><pre class="highlight"><code>val lines = 
MQTTUtils.createStream(ssc, brokerUrl, topic)
 val lines = MQTTUtils.createPairedStream(ssc, brokerUrl, topic)
+val lines = MQTTUtils.createPairedByteArrayStream(ssc, brokerUrl, topic)
 </code></pre>
 </div>
 
@@ -262,6 +263,7 @@ val lines = MQTTUtils.createPairedStream(ssc, brokerUrl, 
topic)
 
 <pre><code class="language-Scala">val lines = MQTTUtils.createStream(ssc, 
brokerUrl, topic, storageLevel, clientId, username, password, cleanSession, 
qos, connectionTimeout, keepAliveInterval, mqttVersion)
 val lines = MQTTUtils.createPairedStream(ssc, brokerUrl, topics, storageLevel, 
clientId, username, password, cleanSession, qos, connectionTimeout, 
keepAliveInterval, mqttVersion)
+val lines = MQTTUtils.createPairedByteArrayStream(ssc, brokerUrl, topics, 
storageLevel, clientId, username, password, cleanSession, qos, 
connectionTimeout, keepAliveInterval, mqttVersion)
 </code></pre>
 
 <h3 id="java-api">Java API</h3>
@@ -271,11 +273,24 @@ this actor can be configured to handle failures, etc.</p>
 
 <div class="highlighter-rouge"><pre 
class="highlight"><code>JavaDStream&lt;String&gt; lines = 
MQTTUtils.createStream(jssc, brokerUrl, topic);
 JavaReceiverInputDStream&lt;Tuple2&lt;String, String&gt;&gt; lines = 
MQTTUtils.createPairedStream(jssc, brokerUrl, topics);
+JavaReceiverInputDStream&lt;Tuple2&lt;String, String&gt;&gt; lines = 
MQTTUtils.createPairedByteArrayStream(jssc, brokerUrl, topics);
 </code></pre>
 </div>
 
 <p>See end-to-end examples at <a 
href="https://github.com/apache/bahir/tree/master/streaming-mqtt/examples";>MQTT 
Examples</a></p>
 
+<h3 id="python-api">Python API</h3>
+
+<p>Create a DStream from a single topic.</p>
+
+<pre><code class="language-Python">    MQTTUtils.createStream(ssc, broker_url, 
topic)
+</code></pre>
+
+<p>Create a DStream from a list of topics.</p>
+
+<pre><code class="language-Python">    MQTTUtils.createPairedStream(ssc, 
broker_url, topics)
+</code></pre>
+
   </div>
 </div>
 

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/docs/spark/current/spark-streaming-pubsub/index.html
----------------------------------------------------------------------
diff --git a/content/docs/spark/current/spark-streaming-pubsub/index.html 
b/content/docs/spark/current/spark-streaming-pubsub/index.html
index a65c75d..a7088db 100644
--- a/content/docs/spark/current/spark-streaming-pubsub/index.html
+++ b/content/docs/spark/current/spark-streaming-pubsub/index.html
@@ -251,6 +251,33 @@ The <code class="highlighter-rouge">--packages</code> 
argument can also be used
 
 <p>See end-to-end examples at <a href="streaming-pubsub/examples">Google Cloud 
Pubsub Examples</a></p>
 
+<h3 id="unit-test">Unit Test</h3>
+
+<p>To run the PubSub test cases, you need to generate <strong>Google API 
service account key files</strong> and set the corresponding environment 
variables to enable the tests.</p>
+
+<h4 id="to-generate-a-service-account-key-file-with-pubsub-permission">To 
generate a service account key file with PubSub permission</h4>
+
+<ol>
+  <li>Go to the <a href="https://console.cloud.google.com";>Google API Console</a></li>
+  <li>Choose the <code class="highlighter-rouge">Credentials</code> tab &gt; 
<code class="highlighter-rouge">Create credentials</code> button &gt; <code 
class="highlighter-rouge">Service account key</code></li>
+  <li>Fill in the account name, assign <code class="highlighter-rouge">Role &gt; 
Pub/Sub &gt; Pub/Sub Editor</code>, and check the option <code 
class="highlighter-rouge">Furnish a private key</code> to create one. You need 
to create one key file in JSON format and another in P12 format.</li>
+  <li>The account email is the <code class="highlighter-rouge">Service account 
ID</code></li>
+</ol>
+
+<h4 id="setting-the-environment-variables-and-run-test">Setting the 
environment variables and run test</h4>
+
+<div class="highlighter-rouge"><pre class="highlight"><code>mvn clean package 
-DskipTests -pl streaming-pubsub
+
+export ENABLE_PUBSUB_TESTS=1
+export GCP_TEST_ACCOUNT="THE_P12_SERVICE_ACCOUNT_ID_MENTIONED_ABOVE"
+export GCP_TEST_PROJECT_ID="YOUR_GCP_PROJECT_ID"
+export 
GCP_TEST_JSON_KEY_PATH=/path/to/pubsub/credential/files/Apache-Bahir-PubSub-1234abcd.json
+export 
GCP_TEST_P12_KEY_PATH=/path/to/pubsub/credential/files/Apache-Bahir-PubSub-5678efgh.p12
+
+mvn test -pl streaming-pubsub
+</code></pre>
+</div>
+
   </div>
 </div>
 

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/docs/spark/current/spark-streaming-twitter/index.html
----------------------------------------------------------------------
diff --git a/content/docs/spark/current/spark-streaming-twitter/index.html 
b/content/docs/spark/current/spark-streaming-twitter/index.html
index 138ff47..71fc89a 100644
--- a/content/docs/spark/current/spark-streaming-twitter/index.html
+++ b/content/docs/spark/current/spark-streaming-twitter/index.html
@@ -248,7 +248,7 @@ TwitterUtils.createStream(jssc);
 </code></pre>
 </div>
 
-<p>You can also either get the public stream, or get the filtered stream based 
on keywords.
+<p>You can also either get the public stream, or get the filtered stream based 
on keywords. 
 See end-to-end examples at <a 
href="https://github.com/apache/bahir/tree/master/streaming-twitter/examples";>Twitter
 Examples</a></p>
 
   </div>

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/feed.xml
----------------------------------------------------------------------
diff --git a/content/feed.xml b/content/feed.xml
index 2e2e6b7..a880483 100644
--- a/content/feed.xml
+++ b/content/feed.xml
@@ -6,8 +6,8 @@
 </description>
     <link>http://bahir.apache.org/</link>
     <atom:link href="http://bahir.apache.org/feed.xml"; rel="self" 
type="application/rss+xml"/>
-    <pubDate>Sat, 28 Apr 2018 13:12:59 -0700</pubDate>
-    <lastBuildDate>Sat, 28 Apr 2018 13:12:59 -0700</lastBuildDate>
+    <pubDate>Thu, 07 Jun 2018 10:44:14 +0200</pubDate>
+    <lastBuildDate>Thu, 07 Jun 2018 10:44:14 +0200</lastBuildDate>
     <generator>Jekyll v3.2.1</generator>
     
       <item>
@@ -23,7 +23,7 @@ it.&lt;/p&gt;
 this release includes faster-than-light travel and chewing gum that
 never loses its flavor.&lt;/p&gt;
 </description>
-        <pubDate>Tue, 10 Nov 2015 04:00:00 -0800</pubDate>
+        <pubDate>Tue, 10 Nov 2015 13:00:00 +0100</pubDate>
         <link>http://bahir.apache.org/news/2015/11/10/release-0.2.0/</link>
         <guid 
isPermaLink="true">http://bahir.apache.org/news/2015/11/10/release-0.2.0/</guid>
         
@@ -48,9 +48,9 @@ committers for their work on the project. Welcome!&lt;/p&gt;
   &lt;li&gt;Princess Leia&lt;/li&gt;
 &lt;/ul&gt;
 </description>
-        <pubDate>Sun, 08 Nov 2015 19:03:07 -0800</pubDate>
-        <link>http://bahir.apache.org/news/2015/11/08/new-committers/</link>
-        <guid 
isPermaLink="true">http://bahir.apache.org/news/2015/11/08/new-committers/</guid>
+        <pubDate>Mon, 09 Nov 2015 04:03:07 +0100</pubDate>
+        <link>http://bahir.apache.org/news/2015/11/09/new-committers/</link>
+        <guid 
isPermaLink="true">http://bahir.apache.org/news/2015/11/09/new-committers/</guid>
         
         
         <category>team</category>
@@ -67,7 +67,7 @@ committers for their work on the project. Welcome!&lt;/p&gt;
 
 &lt;p&gt;We’re so pleased to be in the Apache Incubator.&lt;/p&gt;
 </description>
-        <pubDate>Fri, 25 Sep 2015 05:00:00 -0700</pubDate>
+        <pubDate>Fri, 25 Sep 2015 14:00:00 +0200</pubDate>
         <link>http://bahir.apache.org/news/2015/09/25/release-0.1.0/</link>
         <guid 
isPermaLink="true">http://bahir.apache.org/news/2015/09/25/release-0.1.0/</guid>
         

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/news/2015/11/08/new-committers/index.html
----------------------------------------------------------------------
diff --git a/content/news/2015/11/08/new-committers/index.html 
b/content/news/2015/11/08/new-committers/index.html
deleted file mode 100644
index f0dd77d..0000000
--- a/content/news/2015/11/08/new-committers/index.html
+++ /dev/null
@@ -1,11 +0,0 @@
-<!--
-
--->
-
-<p>The Bahir project management committee today added two new
-committers for their work on the project. Welcome!</p>
-
-<ul>
-  <li>Darth Vader</li>
-  <li>Princess Leia</li>
-</ul>

http://git-wip-us.apache.org/repos/asf/bahir-website/blob/cc5d26ef/content/news/2015/11/09/new-committers/index.html
----------------------------------------------------------------------
diff --git a/content/news/2015/11/09/new-committers/index.html 
b/content/news/2015/11/09/new-committers/index.html
new file mode 100644
index 0000000..f0dd77d
--- /dev/null
+++ b/content/news/2015/11/09/new-committers/index.html
@@ -0,0 +1,11 @@
+<!--
+
+-->
+
+<p>The Bahir project management committee today added two new
+committers for their work on the project. Welcome!</p>
+
+<ul>
+  <li>Darth Vader</li>
+  <li>Princess Leia</li>
+</ul>
