http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/themes/mahout/header.html
----------------------------------------------------------------------
diff --git a/website/_includes/themes/mahout/header.html 
b/website/_includes/themes/mahout/header.html
new file mode 100644
index 0000000..0c29215
--- /dev/null
+++ b/website/_includes/themes/mahout/header.html
@@ -0,0 +1,75 @@
+<!doctype html>
+<!--[if lt IE 7 ]><html itemscope itemtype="http://schema.org/{% if page.id or 
post.id %}Article{% else %}Organization{% endif %}" id="ie6" class="ie ie-old" 
lang="en-US"><![endif]-->
+<!--[if IE 7 ]>   <html itemscope itemtype="http://schema.org/{% if page.id or 
post.id %}Article{% else %}Organization{% endif %}" id="ie7" class="ie ie-old" 
lang="en-US"><![endif]-->
+<!--[if IE 8 ]>   <html itemscope itemtype="http://schema.org/{% if page.id or 
post.id %}Article{% else %}Organization{% endif %}" id="ie8" class="ie ie-old" 
lang="en-US"><![endif]-->
+<!--[if IE 9 ]>   <html itemscope itemtype="http://schema.org/{% if page.id or 
post.id %}Article{% else %}Organization{% endif %}" id="ie9" class="ie" 
lang="en-US"><![endif]-->
+<!--[if gt IE 9]><!--><html itemscope itemtype="http://schema.org/{% if 
page.id or post.id %}Article{% else %}Organization{% endif %}" 
lang="en-US"><!--<![endif]-->
+<head>
+    <meta name="author" content="The Apache Software Foundation">
+
+    <!-- Enable responsive viewport -->
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+
+    <!-- Le HTML5 shim, for IE6-8 support of HTML elements -->
+    <!--[if lt IE 9]>
+      <script 
src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
+    <![endif]-->
+
+    <!-- Le styles -->
+    <link 
href="https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css"
 rel="stylesheet" 
integrity="sha384-wvfXpqpZZVQGK6TAh5PVlGOfQNHSoD2xbE+QkPxCAFlNEevoEH3Sl0sibVcOQVnN"
 crossorigin="anonymous">
+    <link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
+    <link href="/assets/themes/mahout/bootstrap/css/bootstrap.css" 
rel="stylesheet">
+    <link href="/assets/themes/mahout/css/style.css?body=1" rel="stylesheet" 
type="text/css">
+    <link href="/assets/themes/mahout/css/syntax.css" rel="stylesheet"  
type="text/css" media="screen" /> 
+
+<!--
+    <link href="/assets/themes/mahout/css/main.css" rel="stylesheet"  
type="text/css" media="screen" /> 
+    <link href="/assets/themes/mahout/css/global.css" rel="stylesheet"  
type="text/css" media="screen" /> 
+    <link href="/assets/themes/mahout/css/global__.css" rel="stylesheet"  
type="text/css" media="screen" /> 
+-->
+    <!-- Le fav and touch icons -->
+    <!-- Update these with your own images
+    <link rel="shortcut icon" href="images/favicon.ico">
+    <link rel="apple-touch-icon" href="images/apple-touch-icon.png">
+    <link rel="apple-touch-icon" sizes="72x72" 
href="images/apple-touch-icon-72x72.png">
+    <link rel="apple-touch-icon" sizes="114x114" 
href="images/apple-touch-icon-114x114.png">
+    -->
+
+    <!-- Js -->
+    <script src="https://code.jquery.com/jquery-1.10.2.min.js"></script>
+    <script src="/assets/themes/mahout/bootstrap/js/bootstrap.min.js"></script>
+<script type="text/x-mathjax-config">
+  MathJax.Hub.Config({
+    tex2jax: {
+      skipTags: ['script', 'noscript', 'style', 'textarea', 'pre']
+    }
+  });
+  MathJax.Hub.Queue(function() {
+    var all = MathJax.Hub.getAllJax(), i;
+    for(i = 0; i < all.length; i += 1) {
+      all[i].SourceElement().parentNode.className += ' has-jax';
+    }
+  });
+  </script>
+  <script type="text/javascript">
+    var mathjax = document.createElement('script'); 
+    mathjax.type = 'text/javascript'; 
+    mathjax.async = true;
+
+    mathjax.src = ('https:' == document.location.protocol) ?
+        
'https://c328740.ssl.cf1.rackcdn.com/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML'
 : 
+        
'http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML';
+    
+      var s = document.getElementsByTagName('script')[0]; 
+    s.parentNode.insertBefore(mathjax, s);
+  </script>
+
+
+    <!-- atom & rss feed -->
+    <link href="/atom.xml" type="application/atom+xml" rel="alternate" 
title="Sitewide ATOM Feed">
+    <link href="/rss.xml" type="application/rss+xml" rel="alternate" 
title="Sitewide RSS Feed">
+
+
+</head>
+
+

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/themes/mahout/homepage.html
----------------------------------------------------------------------
diff --git a/website/_includes/themes/mahout/homepage.html 
b/website/_includes/themes/mahout/homepage.html
new file mode 100644
index 0000000..3550013
--- /dev/null
+++ b/website/_includes/themes/mahout/homepage.html
@@ -0,0 +1,161 @@
+
+<div class="jumbotron">
+  <div class="container">
+    <h1>Apache Mahout - DRAFT </h1>
+    <p>A distributed linear algebra framework that runs on Spark, Flink, GPU's 
and more!<br/>
+      Use Mahout's library of machine learning algorithms or roll your own!  
Use Mahout-Samsara to write matrix
+      algebra using R-like syntax.  Check out our tutorials and quick start 
guide to get rolling.
+    </p>
+    <div class="border row">
+      <div class="col-md-12 col-sm-12 col-xs-12 text-center newBtn">
+        <a href="http://youtube.com" target="_zeppelinVideo" class="btn 
btn-primary btn-lg bigFingerButton" role="button">Tutorial Video</a>
+        <!-- is this supposed to be a link that works or is it an artifact?? 
^^ -->
+        <a href="https://github.com/apache/mahout" class="btn btn-primary 
btn-lg bigFingerButton" role="button">GET LATEST MAHOUT</a>
+      </div>
+    </div>
+  </div>
+</div>
+
+<!-- 3 wide column -->
+
+<!--
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+
+
+<div class="new">
+  <div class="container">
+    <h2>Latest Release</h2>
+    <span class="newZeppelin center-block">Apache Mahout 0.13.0</span>
+    <div class="border row">
+      <div class="border col-md-4 col-sm-4">
+        <h4>Simple and <br/>Extensible</h4>
+        <div class="viz">
+          <p>
+            Build your own algorithms using Mahout's R-like interface.  See an 
example in this
+            <a href="" target="_blank">demo</a>
+          </p>
+        </div>
+      </div>
+      <div class="border col-md-4 col-sm-4">
+        <h4>Support for Multiple <br/>Distributed Backends</h4>
+        <div class="multi">
+        <p>
+           Custom bindings for Spark, Flink, and H2O enable a write once run 
anywhere machine learning platform
+          <a class="thumbnail text-center" href="#thumb">
+            See more in this DEMO.
+            <span><img src="./assets/themes/zeppelin/img/scope.gif" 
style="max-width: 55vw" /></span>
+          </a>
+        </p>
+        </div>
+      </div>
+      <div class="border col-md-4 col-sm-4">
+        <h4>Introducing Samsara, an R<br/> DSL for writing ML algos</h4>
+        <div class="personal">
+        <p>
+          Use this capability to write algorithms at scale, that will run on 
any backend
+        </p>
+        </div>
+      </div>
+    </div>
+    <div class="border row">
+      <div class="border col-md-4 col-sm-4">
+        <h4>Support for GPUs</h4>
+        <p>
+          Distributed GPU Matrix-Matrix and Matrix-Vector multiplication on 
Spark along with sparse and dense matrix GPU-backed support.
+        </p>
+      </div>
+      <div class="border col-md-4 col-sm-4">
+        <h4>Extensible Algorithms Framework</h4>
+        <p>
+           A new scikit-learn-like framework for algorithms with the goal for
+           creating a consistent API for various machine-learning algorithms
+        </p>
+      </div>
+      <div class="border col-md-4 col-sm-4">
+        <h4>0.13.1 - Future Plans</h4>
+        <p>
+          Further Native Integration
+          * JCuda backing for In-core Matrices and CUDA solvers
+          * GPU/OpenMP Acceleration for linear solvers
+          * Scala 2.11 Support
+          * Spark 2.x Support
+        </p>
+      </div>
+    </div>
+    <div class="col-md-12 col-sm-12 col-xs-12 text-center">
+      <p style="text-align:center; margin-top: 32px; font-size: 14px; color: 
gray; font-weight: 200; font-style: italic; padding-bottom: 0;">See more 
details in
+        <a href="tbd">0.13.0 Release Note</a>
+      </p>
+    </div>
+  </div>
+</div>
+
+      <!--
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+
+
+        <div class="container">
+            <div class="row">
+                <div class="col-md-12">
+
+
+                </div>
+            </div>
+            <div class="row">
+                <div class="col-md-12">
+                    {% for post in paginator.posts %}
+                        {% include tile.html %}
+                    {% endfor %}
+
+
+
+                </div>
+            </div>
+        </div>
+
+
+
+<div class="new">
+  <div class="container">
+    <h2>Mahout on Twitter</h2>
+    <br/>
+    <div class="row">
+      <div class="col-md-12 col-sm-12 col-xs-12 text-center">
+        <div class='jekyll-twitter-plugin'><a class="twitter-timeline" 
data-width="500" data-tweet-limit="4" data-chrome="nofooter" 
href="https://twitter.com/ApacheMahout">Tweets by ApacheMahout</a>
+<script async src="//platform.twitter.com/widgets.js" 
charset="utf-8"></script></div>
+      </div>
+      <div class="col-md-12 col-sm-12 col-xs-12 text-center twitterBtn">
+        <p style="text-align:center; margin-top: 32px; font-size: 12px; color: 
gray; font-weight: 200; font-style: italic; padding-bottom: 0;">See more tweets 
or</p>
+        <a href="https://twitter.com/ApacheMahout" target="_blank" class="btn 
btn-primary btn-lg round" role="button">
+          Follow Mahout on &nbsp;
+          <i class="fa fa-twitter fa-lg" aria-hidden="true"></i>
+        </a>
+      </div>
+    </div>
+  </div>
+  <hr>
+</div>

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/themes/mahout/navbar_main.html
----------------------------------------------------------------------
diff --git a/website/_includes/themes/mahout/navbar_main.html 
b/website/_includes/themes/mahout/navbar_main.html
new file mode 100644
index 0000000..409d1e9
--- /dev/null
+++ b/website/_includes/themes/mahout/navbar_main.html
@@ -0,0 +1,69 @@
+<body class="{{ post.title | downcase | replace:' ','-' | replace:',','' | 
strip_html }}{% if page.category %} category-{{ page.category }}{% endif %}{% 
if page.layout-class %} layout-{{ page.layout-class }}{% endif %}">
+
+    <div class="navbar navbar-inverse navbar-fixed-top" role="navigation">
+      <div class="container">
+        <div class="navbar-header">
+          <button type="button" class="navbar-toggle" data-toggle="collapse" 
data-target=".navbar-collapse">
+            <span class="sr-only">Toggle navigation</span>
+            <span class="icon-bar"></span>
+            <span class="icon-bar"></span>
+            <span class="icon-bar"></span>
+          </button>
+          <a class="navbar-brand" href="/">
+            <img src="/assets/themes/mahout/img/mahout-logo.png" width="75" 
alt="Apache Mahout logo">
+            Apache Mahout
+          </a>
+        </div>
+        <nav class="navbar-collapse collapse" role="navigation">
+          <ul class="nav navbar-nav navbar-right">
+
+            <li id="community">
+              <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Community<b class="caret"></b></a>
+              <ul class="dropdown-menu">
+                <li><a href="/developers/how-to-contribute.html">How to 
Contribute</a></li>
+                <li><a href="/issue-tracker">Issues Tracking (JIRA)</a></li>
+                <li><a href="/developers">Developers</a></li>
+                <li><a href="/mailing-lists">Mailing Lists</a></li>
+                <li><a href="/reference">References</a></li>
+              </ul>
+            </li>         
+            
+            <!-- Download -->
+            <li id="download">
+              <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Download<b class="caret"></b></a>
+              <ul class="dropdown-menu">
+                <li><a href="https://github.com/apache/mahout">Download 
Mahout</a></li>
+              </ul>
+            </li>
+
+            <!-- Docs -->
+            <li id="docs">
+              <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Docs<b class="caret"></b></a>
+              <ul class="dropdown-menu">
+                <li><span><b>Release</b></span></li>
+                <li><a href="/docs/0.13.0/quickstart">0.13.0</a></li>
+                <li><a href="/tbd">Older Versions</a></li>
+                <li role="separator" class="divider"></li>
+                <li><span><b>Snapshot</b>&nbsp;(development)</span></li>
+                <li><a href="/docs/0.13.1-SNAPSHOT">0.13.1-SNAPSHOT</a></li>
+              </ul>
+            </li>
+
+            <li><a href="/docs/0.13.0/quickstart">QuickStart</a></li>
+            <li><a href="https://github.com/apache/mahout">GitHub</a></li>
+
+            <!-- Apache -->
+            <li id="apache">
+              <a href="#" data-toggle="dropdown" 
class="dropdown-toggle">Apache<b class="caret"></b></a>
+               <ul class="dropdown-menu">
+                <li><a 
href="http://www.apache.org/foundation/how-it-works.html">Apache Software 
Foundation</a></li>
+                <li><a href="http://www.apache.org/licenses/">Apache 
License</a></li>
+                <li><a 
href="http://www.apache.org/foundation/sponsorship.html">Sponsorship</a></li>
+                <li><a 
href="http://www.apache.org/foundation/thanks.html">Thanks</a></li>
+              </ul>
+            </li>
+          </ul>
+        </nav><!--/.navbar-collapse -->
+      </div>
+    </div>
+

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/themes/mahout/page.html
----------------------------------------------------------------------
diff --git a/website/_includes/themes/mahout/page.html 
b/website/_includes/themes/mahout/page.html
new file mode 100644
index 0000000..e981ad9
--- /dev/null
+++ b/website/_includes/themes/mahout/page.html
@@ -0,0 +1,9 @@
+<div class="page-header">
+  <h1>{{ page.title }} {% if page.tagline %} <small>{{ page.tagline 
}}</small>{% endif %}</h1>
+</div>
+
+<div class="row">
+  <div class="col-xs-12">
+    {{ content }}
+  </div>
+</div>

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/themes/mahout/pagination.html
----------------------------------------------------------------------
diff --git a/website/_includes/themes/mahout/pagination.html 
b/website/_includes/themes/mahout/pagination.html
new file mode 100644
index 0000000..fcccc8d
--- /dev/null
+++ b/website/_includes/themes/mahout/pagination.html
@@ -0,0 +1,35 @@
+<nav class="pagination-wrap">
+    <ul class="pagination">
+        {% if paginator.previous_page %}
+        <li>
+            <a href="{{ paginator.previous_page_path | prepend: site.baseurl | 
replace: '//', '/' }}" aria-label="Previous">
+                <span aria-hidden="true">&laquo;</span>
+            </a>
+        </li>
+        {% endif %}
+
+        {% for page in (1..paginator.total_pages) %}
+            {% if page == paginator.page %}
+            <li class="active">
+                <a href="#">{{ page }}</a>
+            </li>
+            {% elsif page == 1 %}
+            <li>
+                <a href="{{ paginator.previous_page_path | prepend: 
site.baseurl | replace: '//', '/' }}">{{ page }}</a>
+            </li>
+            {% else %}
+            <li>
+                <a href="{{ site.paginate_path | prepend: site.baseurl | 
replace: '//', '/' | replace: ':num', page }}">{{ page }}</a>
+            </li>
+            {% endif %}
+        {% endfor %}
+
+        {% if paginator.next_page %}
+        <li>
+            <a href="{{ paginator.next_page_path | prepend: site.baseurl | 
replace: '//', '/' }}" aria-label="Next">
+                <span aria-hidden="true">&raquo;</span>
+            </a>
+        </li>
+        {% endif %}
+    </ul>
+</nav>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/themes/mahout/post.html
----------------------------------------------------------------------
diff --git a/website/_includes/themes/mahout/post.html 
b/website/_includes/themes/mahout/post.html
new file mode 100644
index 0000000..85cc0f4
--- /dev/null
+++ b/website/_includes/themes/mahout/post.html
@@ -0,0 +1,47 @@
+<div class="page-header">
+  <h1>{{ page.title }} {% if page.tagline %}<small>{{page.tagline}}</small>{% 
endif %}</h1>
+</div>
+
+<div class="row post-full">
+  <div class="col-xs-12">
+    <div class="date">
+      <span>{{ page.date | date_to_long_string }}</span>
+    </div>
+    <div class="content">
+      {{ content }}
+    </div>
+
+  {% unless page.categories == empty %}
+    <ul class="tag_box inline">
+      <li><i class="glyphicon glyphicon-open"></i></li>
+      {% assign categories_list = page.categories %}
+      {% include JB/categories_list %}
+    </ul>
+  {% endunless %}  
+
+  {% unless page.tags == empty %}
+    <ul class="tag_box inline">
+      <li><i class="glyphicon glyphicon-tags"></i></li>
+      {% assign tags_list = page.tags %}
+      {% include JB/tags_list %}
+    </ul>
+  {% endunless %}  
+  
+    <hr>
+    <ul class="pagination">
+    {% if page.previous %}
+      <li class="prev"><a href="{{ BASE_PATH }}{{ page.previous.url }}" 
title="{{ page.previous.title }}">&laquo; Previous</a></li>
+    {% else %}
+      <li class="prev disabled"><a>&larr; Previous</a></li>
+    {% endif %}
+      <li><a href="{{ BASE_PATH }}{{ site.JB.archive_path }}">Archive</a></li>
+    {% if page.next %}
+      <li class="next"><a href="{{ BASE_PATH }}{{ page.next.url }}" title="{{ 
page.next.title }}">Next &raquo;</a></li>
+    {% else %}
+      <li class="next disabled"><a>Next &rarr;</a>
+    {% endif %}
+    </ul>
+    <hr>
+    {% include JB/comments %}
+  </div>
+</div>

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/themes/mahout/settings.yml
----------------------------------------------------------------------
diff --git a/website/_includes/themes/mahout/settings.yml 
b/website/_includes/themes/mahout/settings.yml
new file mode 100644
index 0000000..7bf9f32
--- /dev/null
+++ b/website/_includes/themes/mahout/settings.yml
@@ -0,0 +1,2 @@
+theme :
+  name : mahout
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/themes/mahout/tile.html
----------------------------------------------------------------------
diff --git a/website/_includes/themes/mahout/tile.html 
b/website/_includes/themes/mahout/tile.html
new file mode 100644
index 0000000..4c943c2
--- /dev/null
+++ b/website/_includes/themes/mahout/tile.html
@@ -0,0 +1,36 @@
+<div class="tile">
+    {% if post.featured_image or post.featured_video_id %}
+    <a href="{{ post.url | prepend: site.baseurl }}" class="featurette">
+        {% if post.featured_image %}
+        <!--<span class="img" style="background-image: url('{{ 
post.featured_image }}');"></span> -->
+        <span class="img" style="background-image"><img src= '{{ 
post.mh_featured_image }}'></span>
+
+        {% endif %}
+
+        {% if post.featured_video_id %}
+        <iframe src="https://www.youtube.com/embed/{{ post.featured_video_id 
}}" frameborder="0" allowfullscreen></iframe>
+        {% endif %}
+    </a>
+    {% endif %}
+    <div class="inner-guts">
+        <h2><a href="{{ post.mh_featured_url  }}">{{ post.title }}</a></h2>
+        <aside>
+            {{ post.date | date: "%m.%d.%y" }}
+            {% if post.categories != empty %}
+                &middot;
+                {% for category in post.categories %}
+                    <span>{{ category }}</span>
+                {% endfor %}
+            {% endif %}
+        </aside>
+        <div class="excerpt">{{ post.excerpt }}</div>
+        <a href="{{ post.mh_featured_url }}" class="read-more btn btn-info 
btn-lg">Read More <i class="fa fa-angle-right"></i></a>
+        {% if post.categories != empty %}
+        <div class="tags">
+            {% for tag in post.tags %}
+            <span>{{ tag }}</span>
+            {% endfor %}
+        </div>
+        {% endif %}
+    </div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/themes/mahout/title-group.html
----------------------------------------------------------------------
diff --git a/website/_includes/themes/mahout/title-group.html 
b/website/_includes/themes/mahout/title-group.html
new file mode 100644
index 0000000..3daca36
--- /dev/null
+++ b/website/_includes/themes/mahout/title-group.html
@@ -0,0 +1,43 @@
+{% if page.featured_image or page.featured_video_id %}
+    <div class="topper">
+        {% if page.featured_image %}
+        <span class="img" style="background-image: url('{{ page.featured_image 
}}');">
+        {% endif %}
+
+        {% if page.featured_video_id %}
+        <iframe src="https://www.youtube.com/embed/{{ page.featured_video_id 
}}" frameborder="0" allowfullscreen></iframe>
+        {% endif %}
+    </div>
+{% endif %}
+
+<div class="title-group">
+    <h1 class="special">
+        <span>
+        {% if page.title %}
+            {{ page.title }}
+        {% else %}
+            {{ site.data.global.title }}
+        {% endif %}
+        </span>
+    </h1>
+    {% if page.id %}
+    <aside>
+        {{ page.date | date: "%m.%d.%y" }}
+        {% if page.categories != empty %}
+            &middot;
+            {% for category in page.categories %}
+                <span>{{ category }}</span>
+            {% endfor %}
+        {% endif %}
+    </aside>
+    {% endif %}
+    <p class="lead">
+        {% if page.lead_text %}
+            {{ page.lead_text }}
+        {% elsif post.lead_text %}
+            {{ post.lead_text }}
+        {% else %}
+            {{ site.data.global.description }}
+        {% endif %}
+    </p>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/tile.html
----------------------------------------------------------------------
diff --git a/website/_includes/tile.html b/website/_includes/tile.html
deleted file mode 100644
index 4c943c2..0000000
--- a/website/_includes/tile.html
+++ /dev/null
@@ -1,36 +0,0 @@
-<div class="tile">
-    {% if post.featured_image or post.featured_video_id %}
-    <a href="{{ post.url | prepend: site.baseurl }}" class="featurette">
-        {% if post.featured_image %}
-        <!--<span class="img" style="background-image: url('{{ 
post.featured_image }}');"></span> -->
-        <span class="img" style="background-image"><img src= '{{ 
post.mh_featured_image }}'></span>
-
-        {% endif %}
-
-        {% if post.featured_video_id %}
-        <iframe src="https://www.youtube.com/embed/{{ post.featured_video_id 
}}" frameborder="0" allowfullscreen></iframe>
-        {% endif %}
-    </a>
-    {% endif %}
-    <div class="inner-guts">
-        <h2><a href="{{ post.mh_featured_url  }}">{{ post.title }}</a></h2>
-        <aside>
-            {{ post.date | date: "%m.%d.%y" }}
-            {% if post.categories != empty %}
-                &middot;
-                {% for category in post.categories %}
-                    <span>{{ category }}</span>
-                {% endfor %}
-            {% endif %}
-        </aside>
-        <div class="excerpt">{{ post.excerpt }}</div>
-        <a href="{{ post.mh_featured_url }}" class="read-more btn btn-info 
btn-lg">Read More <i class="fa fa-angle-right"></i></a>
-        {% if post.categories != empty %}
-        <div class="tags">
-            {% for tag in post.tags %}
-            <span>{{ tag }}</span>
-            {% endfor %}
-        </div>
-        {% endif %}
-    </div>
-</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_includes/title-group.html
----------------------------------------------------------------------
diff --git a/website/_includes/title-group.html 
b/website/_includes/title-group.html
deleted file mode 100644
index 3daca36..0000000
--- a/website/_includes/title-group.html
+++ /dev/null
@@ -1,43 +0,0 @@
-{% if page.featured_image or page.featured_video_id %}
-    <div class="topper">
-        {% if page.featured_image %}
-        <span class="img" style="background-image: url('{{ page.featured_image 
}}');">
-        {% endif %}
-
-        {% if page.featured_video_id %}
-        <iframe src="https://www.youtube.com/embed/{{ page.featured_video_id 
}}" frameborder="0" allowfullscreen></iframe>
-        {% endif %}
-    </div>
-{% endif %}
-
-<div class="title-group">
-    <h1 class="special">
-        <span>
-        {% if page.title %}
-            {{ page.title }}
-        {% else %}
-            {{ site.data.global.title }}
-        {% endif %}
-        </span>
-    </h1>
-    {% if page.id %}
-    <aside>
-        {{ page.date | date: "%m.%d.%y" }}
-        {% if page.categories != empty %}
-            &middot;
-            {% for category in page.categories %}
-                <span>{{ category }}</span>
-            {% endfor %}
-        {% endif %}
-    </aside>
-    {% endif %}
-    <p class="lead">
-        {% if page.lead_text %}
-            {{ page.lead_text }}
-        {% elsif post.lead_text %}
-            {{ post.lead_text }}
-        {% else %}
-            {{ site.data.global.description }}
-        {% endif %}
-    </p>
-</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_layouts/default.html
----------------------------------------------------------------------
diff --git a/website/_layouts/default.html b/website/_layouts/default.html
index 68ef247..2a57238 100644
--- a/website/_layouts/default.html
+++ b/website/_layouts/default.html
@@ -1,4 +1,6 @@
-{% include header.html %}
-{% include navbar_main.html %}
-{% include homepage.html %}
-{% include footer.html %}
\ No newline at end of file
+---
+theme :
+  name : mahout
+---
+{% include JB/setup %}
+{% include themes/mahout/default.html %}

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_layouts/inner.html
----------------------------------------------------------------------
diff --git a/website/_layouts/inner.html b/website/_layouts/inner.html
index 6603d8d..6b6960c 100644
--- a/website/_layouts/inner.html
+++ b/website/_layouts/inner.html
@@ -1,4 +1,4 @@
-{% include header.html %}
+{% include themes/mahout/header.html %}
 
 
     <article>
@@ -13,4 +13,4 @@
         </div>
     </article>
 
-{% include footer.html %}
+{% include themes/mahout/footer.html %}

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_layouts/mahout.html
----------------------------------------------------------------------
diff --git a/website/_layouts/mahout.html b/website/_layouts/mahout.html
index d174533..16e445d 100644
--- a/website/_layouts/mahout.html
+++ b/website/_layouts/mahout.html
@@ -1,5 +1,5 @@
-{% include header.html %}
-{% include navbar_main.html %}
+{% include themes/mahout/header.html %}
+{% include themes/mahout/navbar_main.html %}
 
 
     <article>
@@ -13,4 +13,4 @@
         </div>
     </article>
 
-{% include footer.html %}
+{% include themes/mahout/footer.html %}

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_layouts/mahoutdoc.html
----------------------------------------------------------------------
diff --git a/website/_layouts/mahoutdoc.html b/website/_layouts/mahoutdoc.html
index 65a73aa..491f9fa 100644
--- a/website/_layouts/mahoutdoc.html
+++ b/website/_layouts/mahoutdoc.html
@@ -1,4 +1,4 @@
-{% include header.html %}
+{% include themes/mahout/header.html %}
 <body class="{{ post.title | downcase | replace:' ','-' | replace:',','' | 
strip_html }}{% if page.category %} category-{{ page.category }}{% endif %}{% 
if page.layout-class %} layout-{{ page.layout-class }}{% endif %}">
 
     <div class="navbar navbar-inverse navbar-fixed-top" role="navigation">
@@ -100,23 +100,6 @@
             </li>
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
         </ul>
         </nav><!--/.navbar-collapse -->
       </div>
@@ -132,4 +115,4 @@
         </div>
     </article>
 
-{% include footer.html %}
+{% include themes/mahout/footer.html %}

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/developer-resources.md
----------------------------------------------------------------------
diff --git a/website/_pages/developer-resources.md 
b/website/_pages/developer-resources.md
deleted file mode 100644
index 35d56eb..0000000
--- a/website/_pages/developer-resources.md
+++ /dev/null
@@ -1,50 +0,0 @@
----
-layout: mahout
-title: Developer Resources
-permalink: /developers/
----
-<a name="DeveloperResources-MakingaContribution"></a>
-## Making a Contribution
-
-Mahout is always looking for contributions, especially in the areas of
-documentation. See our [How to contribute](/developers/how-to-contribute.html) 
page for details.
-
-
-<a name="DeveloperResources-SourceCode"></a>
-## Source Code
-
-The source files are stored using Git, our page on [version 
control](/developers/version-control.html) has details on how to access the 
sourcecode.
-
-
-<a name="DeveloperResources-Documentation"></a>
-## Documentation
-
-Javadoc and Scaladoc documentation is available online by module:
-
- * [Mahout 
Math](http://apache.github.io/mahout/0.10.1/docs/mahout-math/index.html)
- * [Mahout Math Scala 
bindings](http://apache.github.io/mahout/0.10.1/docs/mahout-math-scala/index.html)
- * [Mahout Spark 
bindings](http://apache.github.io/mahout/0.10.1/docs/mahout-spark/index.html)
- * [Mahout Spark bindings 
shell](http://apache.github.io/mahout/0.10.1/docs/mahout-spark-shell/index.html)
- * [Mahout H2O backend 
Scaladoc](http://apache.github.io/mahout/0.10.1/docs/mahout-h2o/scaladocs/index.html)
- * [Mahout H2O backend 
Javadoc](http://apache.github.io/mahout/0.10.1/docs/mahout-h2o/javadoc/index.html)
- * [Mahout 
HDFS](http://apache.github.io/mahout/0.10.1/docs/mahout-hdfs/index.html)
- * [Mahout 
Map-Reduce](http://apache.github.io/mahout/0.10.1/docs/mahout-mr/index.html)
- * [Mahout 
Examples](http://apache.github.io/mahout/0.10.1/docs/mahout-examples/index.html)
- * [Mahout 
Integration](http://apache.github.io/mahout/0.10.1/docs/mahout-integration/index.html)
-
-
-<a name="DeveloperResources-Issues"></a>
-## Issues
-
-All bugs, improvements, [pull 
requests](http://mahout.apache.org/developers/github.html), etc. should be 
logged in our [issue tracker](/developers/issue-tracker.html).
-
-<a name="DeveloperResources-ContinuousIntegration"></a>
-## Continuous Integration
-
-Mahout is continuously built on an hourly basis on the [Apache 
Jenkins](https://builds.apache.org/job/Mahout-Quality/)  build system.
-
-## Board reports
-
-Every three months Mahout submits a report to the Apache board. All of the 
drafts that get sent are checked into svn. See here:
-
-<a 
href="https://svn.apache.org/repos/asf/mahout/pmc/board-reports/";>https://svn.apache.org/repos/asf/mahout/pmc/board-reports/</a>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/docs/0.13.0/algorithms/d-als.md
----------------------------------------------------------------------
diff --git a/website/_pages/docs/0.13.0/algorithms/d-als.md 
b/website/_pages/docs/0.13.0/algorithms/d-als.md
deleted file mode 100644
index ed44c99..0000000
--- a/website/_pages/docs/0.13.0/algorithms/d-als.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-layout: mahoutdoc
-title: Mahout Samsara Distributed ALS
-permalink: /docs/0.13.0/algorithms/samsara/dals
----
-# Distributed Cholesky QR
-
-
-## Intro
-
-Mahout has a distributed implementation of QR decomposition for tall thin 
matrices[1].
-
-## Algorithm 
-
-For the classic QR decomposition of the form 
`\(\mathbf{A}=\mathbf{QR},\mathbf{A}\in\mathbb{R}^{m\times n}\)` a distributed 
version is fairly easily achieved if `\(\mathbf{A}\)` is tall and thin such 
that `\(\mathbf{A}^{\top}\mathbf{A}\)` fits in memory, i.e. *m* is large but 
*n* < ~5000. Under such circumstances, only `\(\mathbf{A}\)` and 
`\(\mathbf{Q}\)` are distributed matrices and `\(\mathbf{A^{\top}A}\)` and 
`\(\mathbf{R}\)` are in-core products. We just compute the in-core version of 
the Cholesky decomposition in the form of `\(\mathbf{LL}^{\top}= 
\mathbf{A}^{\top}\mathbf{A}\)`.  After that we take `\(\mathbf{R}= 
\mathbf{L}^{\top}\)` and 
`\(\mathbf{Q}=\mathbf{A}\left(\mathbf{L}^{\top}\right)^{-1}\)`.  The latter is 
easily achieved by multiplying each vertical block of `\(\mathbf{A}\)` by 
`\(\left(\mathbf{L}^{\top}\right)^{-1}\)`.  (There is no actual matrix 
inversion happening). 
-
-
-
-## Implementation
-
-Mahout `dqrThin(...)` is implemented in the mahout `math-scala` algebraic 
optimizer which translates Mahout's R-like linear algebra operators into a 
physical plan for both Spark and H2O distributed engines.
-
-    def dqrThin[K: ClassTag](A: DrmLike[K], checkRankDeficiency: Boolean = 
true): (DrmLike[K], Matrix) = {        
-        if (drmA.ncol > 5000)
-            log.warn("A is too fat. A'A must fit in memory and easily 
broadcasted.")
-        implicit val ctx = drmA.context
-        val AtA = (drmA.t %*% drmA).checkpoint()
-        val inCoreAtA = AtA.collect
-        val ch = chol(inCoreAtA)
-        val inCoreR = (ch.getL cloned) t
-        if (checkRankDeficiency && !ch.isPositiveDefinite)
-            throw new IllegalArgumentException("R is rank-deficient.")
-        val bcastAtA = sc.broadcast(inCoreAtA)
-        val Q = A.mapBlock() {
-            case (keys, block) => keys -> chol(bcastAtA).solveRight(block)
-        }
-        Q -> inCoreR
-    }
-
-
-## Usage
-
-The scala `dqrThin(...)` method can easily be called in any Spark or H2O 
application built with the `math-scala` library and the corresponding `Spark` 
or `H2O` engine module as follows:
-
-    import org.apache.mahout.math._
-    import decompositions._
-    import drm._
-    
-    val(drmQ, inCoreR) = dqrThin(drma)
-
- 
-## References
-
-[1]: [Mahout Scala and Mahout Spark Bindings for Linear Algebra 
Subroutines](http://mahout.apache.org/users/sparkbindings/ScalaSparkBindings.pdf)
-
-[2]: [Mahout Spark and Scala 
Bindings](http://mahout.apache.org/users/sparkbindings/home.html)
-

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/docs/0.13.0/algorithms/d-qr.md
----------------------------------------------------------------------
diff --git a/website/_pages/docs/0.13.0/algorithms/d-qr.md 
b/website/_pages/docs/0.13.0/algorithms/d-qr.md
deleted file mode 100644
index 65ae9b2..0000000
--- a/website/_pages/docs/0.13.0/algorithms/d-qr.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-layout: mahoutdoc
-title: Mahout Samsara DQR
-permalink: /docs/0.13.0/algorithms/samsara/dqr
----
-# Distributed Cholesky QR
-
-
-## Intro
-
-Mahout has a distributed implementation of QR decomposition for tall thin 
matrices[1].
-
-## Algorithm 
-
-For the classic QR decomposition of the form 
`\(\mathbf{A}=\mathbf{QR},\mathbf{A}\in\mathbb{R}^{m\times n}\)` a distributed 
version is fairly easily achieved if `\(\mathbf{A}\)` is tall and thin such 
that `\(\mathbf{A}^{\top}\mathbf{A}\)` fits in memory, i.e. *m* is large but 
*n* < ~5000. Under such circumstances, only `\(\mathbf{A}\)` and 
`\(\mathbf{Q}\)` are distributed matrices and `\(\mathbf{A^{\top}A}\)` and 
`\(\mathbf{R}\)` are in-core products. We just compute the in-core version of 
the Cholesky decomposition in the form of `\(\mathbf{LL}^{\top}= 
\mathbf{A}^{\top}\mathbf{A}\)`.  After that we take `\(\mathbf{R}= 
\mathbf{L}^{\top}\)` and 
`\(\mathbf{Q}=\mathbf{A}\left(\mathbf{L}^{\top}\right)^{-1}\)`.  The latter is 
easily achieved by multiplying each vertical block of `\(\mathbf{A}\)` by 
`\(\left(\mathbf{L}^{\top}\right)^{-1}\)`.  (There is no actual matrix 
inversion happening). 
-
-
-
-## Implementation
-
-Mahout `dqrThin(...)` is implemented in the mahout `math-scala` algebraic 
optimizer which translates Mahout's R-like linear algebra operators into a 
physical plan for both Spark and H2O distributed engines.
-
-    def dqrThin[K: ClassTag](A: DrmLike[K], checkRankDeficiency: Boolean = 
true): (DrmLike[K], Matrix) = {        
-        if (drmA.ncol > 5000)
-            log.warn("A is too fat. A'A must fit in memory and easily 
broadcasted.")
-        implicit val ctx = drmA.context
-        val AtA = (drmA.t %*% drmA).checkpoint()
-        val inCoreAtA = AtA.collect
-        val ch = chol(inCoreAtA)
-        val inCoreR = (ch.getL cloned) t
-        if (checkRankDeficiency && !ch.isPositiveDefinite)
-            throw new IllegalArgumentException("R is rank-deficient.")
-        val bcastAtA = sc.broadcast(inCoreAtA)
-        val Q = A.mapBlock() {
-            case (keys, block) => keys -> chol(bcastAtA).solveRight(block)
-        }
-        Q -> inCoreR
-    }
-
-
-## Usage
-
-The scala `dqrThin(...)` method can easily be called in any Spark or H2O 
application built with the `math-scala` library and the corresponding `Spark` 
or `H2O` engine module as follows:
-
-    import org.apache.mahout.math._
-    import decompositions._
-    import drm._
-    
-    val(drmQ, inCoreR) = dqrThin(drma)
-
- 
-## References
-
-[1]: [Mahout Scala and Mahout Spark Bindings for Linear Algebra 
Subroutines](http://mahout.apache.org/users/sparkbindings/ScalaSparkBindings.pdf)
-
-[2]: [Mahout Spark and Scala 
Bindings](http://mahout.apache.org/users/sparkbindings/home.html)
-

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/docs/0.13.0/algorithms/d-spca.md
----------------------------------------------------------------------
diff --git a/website/_pages/docs/0.13.0/algorithms/d-spca.md 
b/website/_pages/docs/0.13.0/algorithms/d-spca.md
deleted file mode 100644
index 3505c6c..0000000
--- a/website/_pages/docs/0.13.0/algorithms/d-spca.md
+++ /dev/null
@@ -1,174 +0,0 @@
----
-layout: mahoutdoc
-title: Mahout Samsara Dist Sto PCA
-permalink: /docs/0.13.0/algorithms/samsara/dspca
----
-# Distributed Stochastic PCA
-
-
-## Intro
-
-Mahout has a distributed implementation of Stochastic PCA[1]. This algorithm 
computes the exact equivalent of Mahout's dssvd(`\(\mathbf{A-1\mu^\top}\)`) by 
modifying the `dssvd` algorithm so as to avoid forming 
`\(\mathbf{A-1\mu^\top}\)`, which would densify a sparse input. Thus, it is 
suitable for work with both dense and sparse inputs.
-
-## Algorithm
-
-Given an *m* `\(\times\)` *n* matrix `\(\mathbf{A}\)`, a target rank *k*, and 
an oversampling parameter *p*, this procedure computes a *k*-rank PCA by 
finding the unknowns in `\(\mathbf{A−1\mu^\top \approx U\Sigma V^\top}\)`:
-
-1. Create seed for random *n* `\(\times\)` *(k+p)* matrix `\(\Omega\)`.
-2. `\(\mathbf{s_\Omega \leftarrow \Omega^\top \mu}\)`.
-3. `\(\mathbf{Y_0 \leftarrow A\Omega − 1 {s_\Omega}^\top, Y \in 
\mathbb{R}^{m\times(k+p)}}\)`.
-4. Column-orthonormalize `\(\mathbf{Y_0} \rightarrow \mathbf{Q}\)` by 
computing thin decomposition `\(\mathbf{Y_0} = \mathbf{QR}\)`. Also, 
`\(\mathbf{Q}\in\mathbb{R}^{m\times(k+p)}, 
\mathbf{R}\in\mathbb{R}^{(k+p)\times(k+p)}\)`.
-5. `\(\mathbf{s_Q \leftarrow Q^\top 1}\)`.
-6. `\(\mathbf{B_0 \leftarrow Q^\top A: B \in \mathbb{R}^{(k+p)\times n}}\)`.
-7. `\(\mathbf{s_B \leftarrow {B_0}^\top \mu}\)`.
-8. For *i* in 1..*q* repeat (power iterations):
-    - For *j* in 1..*n* apply `\(\mathbf{(B_{i−1})_{∗j} \leftarrow 
(B_{i−1})_{∗j}−\mu_j s_Q}\)`.
-    - `\(\mathbf{Y_i \leftarrow A{B_{i−1}}^\top−1(s_B−\mu^\top \mu 
s_Q)^\top}\)`.
-    - Column-orthonormalize `\(\mathbf{Y_i} \rightarrow \mathbf{Q}\)` by 
computing thin decomposition `\(\mathbf{Y_i = QR}\)`.
-    - `\(\mathbf{s_Q \leftarrow Q^\top 1}\)`.
-    - `\(\mathbf{B_i \leftarrow Q^\top A}\)`.
-    - `\(\mathbf{s_B \leftarrow {B_i}^\top \mu}\)`.
-9. Let `\(\mathbf{C \triangleq s_Q {s_B}^\top}\)`. `\(\mathbf{M \leftarrow B_q 
{B_q}^\top − C − C^\top + \mu^\top \mu s_Q {s_Q}^\top}\)`.
-10. Compute an eigensolution of the small symmetric `\(\mathbf{M = \hat{U} 
\Lambda \hat{U}^\top: M \in \mathbb{R}^{(k+p)\times(k+p)}}\)`.
-11. The singular values `\(\Sigma = \Lambda^{\circ 0.5}\)`, or, in other 
words, `\(\mathbf{\sigma_i= \sqrt{\lambda_i}}\)`.
-12. If needed, compute `\(\mathbf{U = Q\hat{U}}\)`.
-13. If needed, compute `\(\mathbf{V = B^\top \hat{U} \Sigma^{−1}}\)`.
-14. If needed, items converted to the PCA space can be computed as 
`\(\mathbf{U\Sigma}\)`.
-
-## Implementation
-
-Mahout `dspca(...)` is implemented in the mahout `math-scala` algebraic 
optimizer which translates Mahout's R-like linear algebra operators into a 
physical plan for both Spark and H2O distributed engines.
-
-    def dspca[K](drmA: DrmLike[K], k: Int, p: Int = 15, q: Int = 0): 
-    (DrmLike[K], DrmLike[Int], Vector) = {
-
-        // Some mapBlock() calls need it
-        implicit val ktag =  drmA.keyClassTag
-
-        val drmAcp = drmA.checkpoint()
-        implicit val ctx = drmAcp.context
-
-        val m = drmAcp.nrow
-       val n = drmAcp.ncol
-        assert(k <= (m min n), "k cannot be greater than smaller of m, n.")
-        val pfxed = safeToNonNegInt((m min n) - k min p)
-
-        // Actual decomposition rank
-        val r = k + pfxed
-
-        // Dataset mean
-        val mu = drmAcp.colMeans
-
-        val mtm = mu dot mu
-
-        // We represent Omega by its seed.
-        val omegaSeed = RandomUtils.getRandom().nextInt()
-        val omega = Matrices.symmetricUniformView(n, r, omegaSeed)
-
-        // This done in front in a single-threaded fashion for now. Even 
though it doesn't require any
-        // memory beyond that is required to keep xi around, it still might be 
parallelized to backs
-        // for significantly big n and r. TODO
-        val s_o = omega.t %*% mu
-
-        val bcastS_o = drmBroadcast(s_o)
-        val bcastMu = drmBroadcast(mu)
-
-        var drmY = drmAcp.mapBlock(ncol = r) {
-            case (keys, blockA) ⇒
-                val s_o:Vector = bcastS_o
-                val blockY = blockA %*% Matrices.symmetricUniformView(n, r, 
omegaSeed)
-                for (row ← 0 until blockY.nrow) blockY(row, ::) -= s_o
-                keys → blockY
-        }
-                // Checkpoint Y
-                .checkpoint()
-
-        var drmQ = dqrThin(drmY, checkRankDeficiency = false)._1.checkpoint()
-
-        var s_q = drmQ.colSums()
-        var bcastVarS_q = drmBroadcast(s_q)
-
-        // This actually should be optimized as identically partitioned 
map-side A'B since A and Q should
-        // still be identically partitioned.
-        var drmBt = (drmAcp.t %*% drmQ).checkpoint()
-
-        var s_b = (drmBt.t %*% mu).collect(::, 0)
-        var bcastVarS_b = drmBroadcast(s_b)
-
-        for (i ← 0 until q) {
-
-            // These closures don't seem to live well with outside-scope vars. 
This doesn't record closure
-            // attributes correctly. So we create additional set of vals for 
broadcast vars to properly
-            // create readonly closure attributes in this very scope.
-            val bcastS_q = bcastVarS_q
-            val bcastMuInner = bcastMu
-
-            // Fix Bt as B' -= xi cross s_q
-            drmBt = drmBt.mapBlock() {
-                case (keys, block) ⇒
-                    val s_q: Vector = bcastS_q
-                    val mu: Vector = bcastMuInner
-                    keys.zipWithIndex.foreach {
-                        case (key, idx) ⇒ block(idx, ::) -= s_q * mu(key)
-                    }
-                    keys → block
-            }
-
-            drmY.uncache()
-            drmQ.uncache()
-
-            val bCastSt_b = drmBroadcast(s_b -=: mtm * s_q)
-
-            drmY = (drmAcp %*% drmBt)
-                // Fix Y by subtracting st_b from each row of the AB'
-                .mapBlock() {
-                case (keys, block) ⇒
-                    val st_b: Vector = bCastSt_b
-                    block := { (_, c, v) ⇒ v - st_b(c) }
-                    keys → block
-            }
-            // Checkpoint Y
-            .checkpoint()
-
-            drmQ = dqrThin(drmY, checkRankDeficiency = false)._1.checkpoint()
-
-            s_q = drmQ.colSums()
-            bcastVarS_q = drmBroadcast(s_q)
-
-            // This on the other hand should be inner-join-and-map A'B 
optimization since A and Q_i are not
-            // identically partitioned anymore.
-            drmBt = (drmAcp.t %*% drmQ).checkpoint()
-
-            s_b = (drmBt.t %*% mu).collect(::, 0)
-            bcastVarS_b = drmBroadcast(s_b)
-        }
-
-        val c = s_q cross s_b
-        val inCoreBBt = (drmBt.t %*% drmBt).checkpoint(CacheHint.NONE).collect 
-=:
-            c -=: c.t +=: mtm *=: (s_q cross s_q)
-        val (inCoreUHat, d) = eigen(inCoreBBt)
-        val s = d.sqrt
-
-        // Since neither drmU nor drmV are actually computed until actually 
used, we don't need the flags
-        // instructing compute (or not compute) either of the U,V outputs 
anymore. Neat, isn't it?
-        val drmU = drmQ %*% inCoreUHat
-        val drmV = drmBt %*% (inCoreUHat %*% diagv(1 / s))
-
-        (drmU(::, 0 until k), drmV(::, 0 until k), s(0 until k))
-    }
-
-## Usage
-
-The scala `dspca(...)` method can easily be called in any Spark, Flink, or H2O 
application built with the `math-scala` library and the corresponding `Spark`, 
`Flink`, or `H2O` engine module as follows:
-
-    import org.apache.mahout.math._
-    import decompositions._
-    import drm._
-    
-    val (drmU, drmV, s) = dspca(drmA, k=200, q=1)
-
-Note the parameter is optional and its default value is zero.
- 
-## References
-
-[1]: Lyubimov and Palumbo, ["Apache Mahout: Beyond MapReduce; Distributed 
Algorithm 
Design"](https://www.amazon.com/Apache-Mahout-MapReduce-Dmitriy-Lyubimov/dp/1523775785)

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/docs/0.13.0/algorithms/d-ssvd.md
----------------------------------------------------------------------
diff --git a/website/_pages/docs/0.13.0/algorithms/d-ssvd.md 
b/website/_pages/docs/0.13.0/algorithms/d-ssvd.md
deleted file mode 100644
index f4ed1a8..0000000
--- a/website/_pages/docs/0.13.0/algorithms/d-ssvd.md
+++ /dev/null
@@ -1,141 +0,0 @@
----
-layout: mahoutdoc
-title: Mahout Samsara DSSVD
-permalink: /docs/0.13.0/algorithms/samsara/dssvd
----
-# Distributed Stochastic Singular Value Decomposition
-
-
-## Intro
-
-Mahout has a distributed implementation of Stochastic Singular Value 
Decomposition [1] using the parallelization strategy comprehensively defined in 
Nathan Halko's dissertation ["Randomized methods for computing low-rank 
approximations of 
matrices"](http://amath.colorado.edu/faculty/martinss/Pubs/2012_halko_dissertation.pdf)
 [2].
-
-## Modified SSVD Algorithm
-
-Given an `\(m\times n\)`
-matrix `\(\mathbf{A}\)`, a target rank `\(k\in\mathbb{N}_{1}\)`
-, an oversampling parameter `\(p\in\mathbb{N}_{1}\)`, 
-and the number of additional power iterations `\(q\in\mathbb{N}_{0}\)`, 
-this procedure computes an `\(m\times\left(k+p\right)\)`
-SVD `\(\mathbf{A\approx U}\boldsymbol{\Sigma}\mathbf{V}^{\top}\)`:
-
-  1. Create seed for random `\(n\times\left(k+p\right)\)`
-  matrix `\(\boldsymbol{\Omega}\)`. The seed defines matrix 
`\(\mathbf{\Omega}\)`
-  using Gaussian unit vectors per one of suggestions in [Halko, Martinsson, 
Tropp].
-
-  2. 
`\(\mathbf{Y=A\boldsymbol{\Omega}},\,\mathbf{Y}\in\mathbb{R}^{m\times\left(k+p\right)}\)`
- 
-  3. Column-orthonormalize `\(\mathbf{Y}\rightarrow\mathbf{Q}\)`
-  by computing thin decomposition `\(\mathbf{Y}=\mathbf{Q}\mathbf{R}\)`.
-  Also, 
`\(\mathbf{Q}\in\mathbb{R}^{m\times\left(k+p\right)},\,\mathbf{R}\in\mathbb{R}^{\left(k+p\right)\times\left(k+p\right)}\)`;
 denoted as `\(\mathbf{Q}=\mbox{qr}\left(\mathbf{Y}\right).\mathbf{Q}\)`
-
-  4. 
`\(\mathbf{B}_{0}=\mathbf{Q}^{\top}\mathbf{A}:\,\,\mathbf{B}\in\mathbb{R}^{\left(k+p\right)\times
 n}\)`.
- 
-  5. If `\(q>0\)`
-  repeat: for `\(i=1..q\)`: 
-  
`\(\mathbf{B}_{i}^{\top}=\mathbf{A}^{\top}\mbox{qr}\left(\mathbf{A}\mathbf{B}_{i-1}^{\top}\right).\mathbf{Q}\)`
-  (power iterations step).
-
-  6. Compute Eigensolution of a small Hermitian 
`\(\mathbf{B}_{q}\mathbf{B}_{q}^{\top}=\mathbf{\hat{U}}\boldsymbol{\Lambda}\mathbf{\hat{U}}^{\top}\)`,
-  
`\(\mathbf{B}_{q}\mathbf{B}_{q}^{\top}\in\mathbb{R}^{\left(k+p\right)\times\left(k+p\right)}\)`.
- 
-  7. Singular values 
`\(\mathbf{\boldsymbol{\Sigma}}=\boldsymbol{\Lambda}^{0.5}\)`,
-  or, in other words, `\(\sigma_{i}=\sqrt{\lambda_{i}}\)`.
- 
-  8. If needed, compute `\(\mathbf{U}=\mathbf{Q}\hat{\mathbf{U}}\)`.
-
-  9. If needed, compute 
`\(\mathbf{V}=\mathbf{B}_{q}^{\top}\hat{\mathbf{U}}\boldsymbol{\Sigma}^{-1}\)`.
-Another way is 
`\(\mathbf{V}=\mathbf{A}^{\top}\mathbf{U}\boldsymbol{\Sigma}^{-1}\)`.
-
-
-
-
-## Implementation
-
-Mahout `dssvd(...)` is implemented in the mahout `math-scala` algebraic 
optimizer which translates Mahout's R-like linear algebra operators into a 
physical plan for both Spark and H2O distributed engines.
-
-    def dssvd[K: ClassTag](drmA: DrmLike[K], k: Int, p: Int = 15, q: Int = 0):
-        (DrmLike[K], DrmLike[Int], Vector) = {
-
-        val drmAcp = drmA.checkpoint()
-
-        val m = drmAcp.nrow
-        val n = drmAcp.ncol
-        assert(k <= (m min n), "k cannot be greater than smaller of m, n.")
-        val pfxed = safeToNonNegInt((m min n) - k min p)
-
-        // Actual decomposition rank
-        val r = k + pfxed
-
-        // We represent Omega by its seed.
-        val omegaSeed = RandomUtils.getRandom().nextInt()
-
-        // Compute Y = A*Omega.  
-        var drmY = drmAcp.mapBlock(ncol = r) {
-            case (keys, blockA) =>
-                val blockY = blockA %*% Matrices.symmetricUniformView(n, r, 
omegaSeed)
-            keys -> blockY
-        }
-
-        var drmQ = dqrThin(drmY.checkpoint())._1
-
-        // Checkpoint Q if last iteration
-        if (q == 0) drmQ = drmQ.checkpoint()
-
-        var drmBt = drmAcp.t %*% drmQ
-        
-        // Checkpoint B' if last iteration
-        if (q == 0) drmBt = drmBt.checkpoint()
-
-        for (i <- 0  until q) {
-            drmY = drmAcp %*% drmBt
-            drmQ = dqrThin(drmY.checkpoint())._1            
-            
-            // Checkpoint Q if last iteration
-            if (i == q - 1) drmQ = drmQ.checkpoint()
-            
-            drmBt = drmAcp.t %*% drmQ
-            
-            // Checkpoint B' if last iteration
-            if (i == q - 1) drmBt = drmBt.checkpoint()
-        }
-
-        val (inCoreUHat, d) = eigen(drmBt.t %*% drmBt)
-        val s = d.sqrt
-
-        // Since neither drmU nor drmV are actually computed until actually 
used
-        // we don't need the flags instructing compute (or not compute) either 
of the U,V outputs 
-        val drmU = drmQ %*% inCoreUHat
-        val drmV = drmBt %*% (inCoreUHat %*%: diagv(1 /: s))
-
-        (drmU(::, 0 until k), drmV(::, 0 until k), s(0 until k))
-    }
-
-Note: As a side effect of checkpointing, U and V values are returned as 
logical operators (i.e. they are neither checkpointed nor computed).  Therefore 
there is no physical work actually done to compute `\(\mathbf{U}\)` or 
`\(\mathbf{V}\)` until they are used in a subsequent expression.
-
-
-## Usage
-
-The scala `dssvd(...)` method can easily be called in any Spark or H2O 
application built with the `math-scala` library and the corresponding `Spark` 
or `H2O` engine module as follows:
-
-    import org.apache.mahout.math._
-    import decompositions._
-    import drm._
-    
-    
-    val(drmU, drmV, s) = dssvd(drma, k = 40, q = 1)
-
- 
-## References
-
-[1]: [Mahout Scala and Mahout Spark Bindings for Linear Algebra 
Subroutines](http://mahout.apache.org/users/sparkbindings/ScalaSparkBindings.pdf)
-
-[2]: [Randomized methods for computing low-rank
-approximations of 
matrices](http://amath.colorado.edu/faculty/martinss/Pubs/2012_halko_dissertation.pdf)
-
-[3]: [Halko, Martinsson, Tropp](http://arxiv.org/abs/0909.4061)
-
-[4]: [Mahout Spark and Scala 
Bindings](http://mahout.apache.org/users/sparkbindings/home.html)
-
-
-

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/docs/0.13.0/mahout-samsara/faq.md
----------------------------------------------------------------------
diff --git a/website/_pages/docs/0.13.0/mahout-samsara/faq.md 
b/website/_pages/docs/0.13.0/mahout-samsara/faq.md
deleted file mode 100644
index 2e5301c..0000000
--- a/website/_pages/docs/0.13.0/mahout-samsara/faq.md
+++ /dev/null
@@ -1,50 +0,0 @@
----
-layout: mahoutdoc
-title: Mahout Samsara
-permalink: /docs/0.13.0/mahout-samsara/faq/
----
-# FAQ for using Mahout with Spark
-
-**Q: Mahout Spark shell doesn't start; "ClassNotFound" problems or various 
classpath problems.**
-
-**A:** So far as of the time of this writing all reported problems starting 
the Spark shell in Mahout were revolving 
-around classpath issues one way or another. 
-
-If you are getting method signature like errors, most probably you have 
mismatch between Mahout's Spark dependency 
-and actual Spark installed. (At the time of this writing the HEAD depends on 
Spark 1.1.0) but check mahout/pom.xml.
-
-Troubleshooting general classpath issues is pretty straightforward. Since 
Mahout is using Spark's installation 
-and its classpath as reported by Spark itself for Spark-related dependencies, 
it is important to make sure 
-the classpath is sane and is made available to Mahout:
-
-1. Check Spark is of correct version (same as in Mahout's poms), is compiled 
and SPARK_HOME is set.
-2. Check Mahout is compiled and MAHOUT_HOME is set.
-3. Run `$SPARK_HOME/bin/compute-classpath.sh` and make sure it produces sane 
result with no errors. 
-If it outputs something other than a straightforward classpath string, most 
likely Spark is not compiled/set correctly (later spark versions require 
`sbt/sbt assembly` to be run, simply running `sbt/sbt publish-local` is not 
enough any longer).
-4. Run `$MAHOUT_HOME/bin/mahout -spark classpath` and check that path reported 
in step (3) is included.
-
-**Q: I am using the command line Mahout jobs that run on Spark or am writing 
my own application that uses 
-Mahout's Spark code. When I run the code on my cluster I get ClassNotFound or 
signature errors during serialization. 
-What's wrong?**
- 
-**A:** The Spark artifacts in the maven ecosystem may not match the exact 
binary you are running on your cluster. This may 
-cause class name or version mismatches. In this case you may wish 
-to build Spark yourself to guarantee that you are running exactly what you are 
building Mahout against. To do this follow these steps
-in order:
-
-1. Build Spark with maven, but **do not** use the "package" target as 
described on the Spark site. Build with the "clean install" target instead. 
-Something like: "mvn clean install -Dhadoop1.2.1" or whatever your particular 
build options are. This will put the jars for Spark
-in the local maven cache.
-2. Deploy **your** Spark build to your cluster and test it there.
-3. Build Mahout. This will cause maven to pull the jars for Spark from the 
local maven cache and may resolve missing 
-or mis-identified classes.
-4. if you are building your own code do so against the local builds of Spark 
and Mahout.
-
-**Q: The implicit SparkContext 'sc' does not work in the Mahout spark-shell.**
-
-**A:** In the Mahout spark-shell the SparkContext is called 'sdc', where the 
'd' stands for distributed. 
-
-
-
-

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/docs/0.13.0/mahout-samsara/flink-bindings.md
----------------------------------------------------------------------
diff --git a/website/_pages/docs/0.13.0/mahout-samsara/flink-bindings.md 
b/website/_pages/docs/0.13.0/mahout-samsara/flink-bindings.md
deleted file mode 100644
index 8d9260a..0000000
--- a/website/_pages/docs/0.13.0/mahout-samsara/flink-bindings.md
+++ /dev/null
@@ -1,48 +0,0 @@
----
-layout: mahoutdoc
-title: Mahout Samsara Flink
-permalink: /docs/0.13.0/mahout-samsara/flink-bindings/
----
-#Introduction
-
-This document provides an overview of how the Mahout Samsara environment is 
implemented over the Apache Flink backend engine. This document gives an 
overview of the code layout for the Flink backend engine, the source code for 
which can be found under /flink directory in the Mahout codebase.
-
-Apache Flink is a distributed big data streaming engine that supports both 
Streaming and Batch interfaces. Batch processing is an extension of Flink’s 
Stream processing engine.
-
-The Mahout Flink integration presently supports Flink’s batch processing 
capabilities leveraging the DataSet API.
-
-The Mahout DRM, or Distributed Row Matrix, is an abstraction for storing a 
large matrix of numbers in-memory in a cluster by distributing logical rows 
among servers. Mahout's scala DSL provides an abstract API on DRMs for backend 
engines to provide implementations of this API. An example is the Spark backend 
engine. Each engine has it's own design of mapping the abstract API onto its 
data model and provides implementations for algebraic operators over that 
mapping.
-
-#Flink Overview
-
-Apache Flink is an open source, distributed Stream and Batch Processing 
Framework. At it's core, Flink is a Stream Processing engine and Batch 
processing is an extension of Stream Processing. 
-
-Flink includes several APIs for building applications with the Flink Engine:
-
- <ol>
-<li><b>DataSet API</b> for Batch data in Java, Scala and Python</li>
-<li><b>DataStream API</b> for Stream Processing in Java and Scala</li>
-<li><b>Table API</b> with SQL-like regular expression language in Java and 
Scala</li>
-<li><b>Gelly</b> Graph Processing API in Java and Scala</li>
-<li><b>CEP API</b>, a complex event processing library</li>
-<li><b>FlinkML</b>, a Machine Learning library</li>
-</ol>
-#Flink Environment Engine
-
-The Flink backend implements the abstract DRM as a Flink DataSet. A Flink job 
runs in the context of an ExecutionEnvironment (from the Flink Batch processing 
API).
-
-#Source Layout
-
-Within mahout.git, the top level directory, flink/ holds all the source code 
for the Flink backend engine. Sections of code that interface with the rest of 
the Mahout components are in Scala, and sections of the code that interface 
with Flink DataSet API and implement algebraic operators are in Java. Here is a 
brief overview of what functionality can be found within flink/ folder.
-
-flink/ - top level directory containing all Flink related code
-
-flink/src/main/scala/org/apache/mahout/flinkbindings/blas/*.scala - Physical 
operator code for the Samsara DSL algebra
-
-flink/src/main/scala/org/apache/mahout/flinkbindings/drm/*.scala - Flink 
Dataset DRM and broadcast implementation
-
-flink/src/main/scala/org/apache/mahout/flinkbindings/io/*.scala - Read / Write 
between DRMDataSet and files on HDFS
-
-flink/src/main/scala/org/apache/mahout/flinkbindings/FlinkEngine.scala - DSL 
operator graph evaluator and various abstract API implementations for a 
distributed engine.
-
-

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/docs/0.13.0/mahout-samsara/h2o-internals.md
----------------------------------------------------------------------
diff --git a/website/_pages/docs/0.13.0/mahout-samsara/h2o-internals.md 
b/website/_pages/docs/0.13.0/mahout-samsara/h2o-internals.md
deleted file mode 100644
index a6ae6e6..0000000
--- a/website/_pages/docs/0.13.0/mahout-samsara/h2o-internals.md
+++ /dev/null
@@ -1,49 +0,0 @@
----
-layout: mahoutdoc
-title: Mahout Samsara H2O
-permalink: /docs/0.13.0/mahout-samsara/h20-bindings/
----
-# Introduction
- 
-This document provides an overview of how the Mahout Samsara environment is 
implemented over the H2O backend engine. The document is aimed at Mahout 
developers, to give a high level description of the design so that one can 
explore the code inside `h2o/` with some context.
-
-## H2O Overview
-
-H2O is a distributed scalable machine learning system. Internal architecture 
of H2O has a distributed math engine (h2o-core) and a separate layer on top for 
algorithms and UI. The Mahout integration requires only the math engine 
(h2o-core).
-
-## H2O Data Model
-
-The data model of the H2O math engine is a distributed columnar store (of 
primarily numbers, but also strings). A column of numbers is called a Vector, 
which is broken into Chunks (of a few thousand elements). Chunks are 
distributed across the cluster based on a deterministic hash. Therefore, any 
member of the cluster knows where a particular Chunk of a Vector is homed. Each 
Chunk is separately compressed in memory and elements are individually 
decompressed on the fly upon access with purely register operations (thereby 
achieving high memory throughput). An ordered set of similarly partitioned Vecs 
are composed into a Frame. A Frame is therefore a large two dimensional table 
of numbers. All elements of a logical row in the Frame are guaranteed to be 
homed in the same server of the cluster. Generally speaking, H2O works well on 
"tall skinny" data, i.e, lots of rows (100s of millions) and modest number of 
columns (10s of thousands).
-
-
-## Mahout DRM
-
-The Mahout DRM, or Distributed Row Matrix, is an abstraction for storing a 
large matrix of numbers in-memory in a cluster by distributing logical rows 
among servers. Mahout's scala DSL provides an abstract API on DRMs for backend 
engines to provide implementations of this API. Examples are the Spark and H2O 
backend engines. Each engine has it's own design of mapping the abstract API 
onto its data model and provides implementations for algebraic operators over 
that mapping.
-
-
-## H2O Environment Engine
-
-The H2O backend implements the abstract DRM as an H2O Frame. Each logical 
column in the DRM is an H2O Vector. All elements of a logical DRM row are 
guaranteed to be homed on the same server. A set of rows stored on a server are 
presented as a read-only virtual in-core Matrix (i.e BlockMatrix) for the 
closure method in the `mapBlock(...)` API.
-
-H2O provides a flexible execution framework called `MRTask`. The `MRTask` 
framework typically executes over a Frame (or even a Vector), supports various 
types of map() methods, can optionally modify the Frame or Vector (though this 
never happens in the Mahout integration), and optionally create a new Vector or 
set of Vectors (to combine them into a new Frame, and consequently a new DRM).
-
-
-## Source Layout
-
-Within mahout.git, the top level directory, `h2o/` holds all the source code 
related to the H2O backend engine. Part of the code (that interfaces with the 
rest of the Mahout components) is in Scala, and part of the code (that 
interfaces with h2o-core and implements algebraic operators) is in Java. Here 
is a brief overview of what functionality can be found where within `h2o/`.
-
-  h2o/ - top level directory containing all H2O related code
-
-  h2o/src/main/java/org/apache/mahout/h2obindings/ops/*.java - Physical 
operator code for the various DSL algebra
-
-  h2o/src/main/java/org/apache/mahout/h2obindings/drm/*.java - DRM backing 
(onto Frame) and Broadcast implementation
-
-  h2o/src/main/java/org/apache/mahout/h2obindings/H2OHdfs.java - Read / Write 
between DRM (Frame) and files on HDFS
-
-  h2o/src/main/java/org/apache/mahout/h2obindings/H2OBlockMatrix.java - A 
vertical block matrix of DRM presented as a virtual copy-on-write in-core 
Matrix. Used in mapBlock() API
-
-  h2o/src/main/java/org/apache/mahout/h2obindings/H2OHelper.java - A 
collection of various functionality and helpers. For e.g, convert between 
in-core Matrix and DRM, various summary statistics on DRM/Frame.
-
-  h2o/src/main/scala/org/apache/mahout/h2obindings/H2OEngine.scala - DSL 
operator graph evaluator and various abstract API implementations for a 
distributed engine
-
-  h2o/src/main/scala/org/apache/mahout/h2obindings/* - Various abstract API 
implementations ("glue work")
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/docs/0.13.0/mahout-samsara/in-core-reference.md
----------------------------------------------------------------------
diff --git a/website/_pages/docs/0.13.0/mahout-samsara/in-core-reference.md 
b/website/_pages/docs/0.13.0/mahout-samsara/in-core-reference.md
deleted file mode 100644
index d754fdb..0000000
--- a/website/_pages/docs/0.13.0/mahout-samsara/in-core-reference.md
+++ /dev/null
@@ -1,302 +0,0 @@
----
-layout: mahoutdoc
-title: Mahout Samsara In Core
-permalink: /docs/0.13.0/mahout-samsara/incore/
----
-## Mahout-Samsara's In-Core Linear Algebra DSL Reference
-
-#### Imports
-
-The following imports are used to enable Mahout-Samsara's Scala DSL bindings 
for in-core Linear Algebra:
-
-    import org.apache.mahout.math._
-    import scalabindings._
-    import RLikeOps._
-    
-#### Inline initialization
-
-Dense vectors:
-
-    val denseVec1: Vector = (1.0, 1.1, 1.2)
-    val denseVec2 = dvec(1, 0, 1.1, 1.2)
-
-Sparse vectors:
-
-    val sparseVec1: Vector = (5 -> 1.0) :: (10 -> 2.0) :: Nil
-    val sparseVec1 = svec((5 -> 1.0) :: (10 -> 2.0) :: Nil)
-
-    // to create a vector with specific cardinality
-    val sparseVec1 = svec((5 -> 1.0) :: (10 -> 2.0) :: Nil, cardinality = 20)
-    
-Inline matrix initialization, either sparse or dense, is always done row wise. 
-
-Dense matrices:
-
-    val A = dense((1, 2, 3), (3, 4, 5))
-    
-Sparse matrices:
-
-    val A = sparse(
-              (1, 3) :: Nil,
-              (0, 2) :: (1, 2.5) :: Nil
-                  )
-
-Diagonal matrix with constant diagonal elements:
-
-    diag(3.5, 10)
-
-Diagonal matrix with main diagonal backed by a vector:
-
-    diagv((1, 2, 3, 4, 5))
-    
-Identity matrix:
-
-    eye(10)
-    
-#### Slicing and Assigning
-
-Getting a vector element:
-
-    val d = vec(5)
-
-Setting a vector element:
-    
-    vec(5) = 3.0
-    
-Getting a matrix element:
-
-    val d = m(3,5)
-    
-Setting a matrix element:
-
-    M(3,5) = 3.0
-    
-Getting a matrix row or column:
-
-    val rowVec = M(3, ::)
-    val colVec = M(::, 3)
-    
-Setting a matrix row or column via vector assignment:
-
-    M(3, ::) := (1, 2, 3)
-    M(::, 3) := (1, 2, 3)
-    
-Setting a subslice of a matrix row or column:
-
-    a(0, 0 to 1) = (3, 5)
-   
-Setting a subslice of a matrix row or column via vector assignment:
-
-    a(0, 0 to 1) := (3, 5)
-   
-Getting a matrix from a contiguous block of another matrix:
-
-    val B = A(2 to 3, 3 to 4)
-   
-Assigning a contiguous block to a matrix:
-
-    A(0 to 1, 1 to 2) = dense((3, 2), (3 ,3))
-   
-Assigning a contiguous block to a matrix using the matrix assignment operator:
-
-    A(0 to 1, 1 to 2) := dense((3, 2), (3, 3))
-   
-Assignment operator used for copying between vectors or matrices:
-
-    vec1 := vec2
-    M1 := M2
-   
-Assignment operator using assignment through a functional literal for a matrix:
-
-    M := ((row, col, x) => if (row == col) 1 else 0)
-    
-Assignment operator using assignment through a functional literal for a vector:
-
-    vec := ((index, x) => sqrt(x))
-    
-#### BLAS-like operations
-
-Plus/minus either vector or numeric with assignment or not:
-
-    a + b
-    a - b
-    a + 5.0
-    a - 5.0
-    
-Hadamard (elementwise) product, either vector or matrix or numeric operands:
-
-    a * b
-    a * 0.5
-
-Operations with assignment:
-
-    a += b
-    a -= b
-    a += 5.0
-    a -= 5.0
-    a *= b
-    a *= 5
-   
-*Some nuanced rules*: 
-
-1/x in R (where x is a vector or a matrix) is elementwise inverse.  In Scala 
it would be expressed as:
-
-    val xInv = 1 /: x
-
-and R's 5.0 - x would be:
-   
-    val x1 = 5.0 -: x
-    
-*note: All assignment operations, including :=, return the assignee just like 
in C++*:
-
-    a -=: b 
-    
-assigns **a - b** to **b** (in-place) and returns **b**.  Similarly for **a 
/=: b** or **1 /=: v** 
-    
-
-Dot product:
-
-    a dot b
-    
-Matrix and vector equivalency (or non-equivalency).  **Dangerous, exact 
equivalence is rarely useful, better to use norm comparisons with an allowance 
of small errors.**
-    
-    a === b
-    a !== b
-    
-Matrix multiply:    
-
-    a %*% b
-    
-Optimized Right Multiply with a diagonal matrix: 
-
-    diag(5, 5) :%*% b
-   
-Optimized Left Multiply with a diagonal matrix:
-
-    A %*%: diag(5, 5)
-
-Second norm, of a vector or matrix:
-
-    a.norm
-    
-Transpose:
-
-    val Mt = M.t
-    
-*note: Transposition is currently handled via view, i.e. updating a transposed 
matrix will be updating the original.*  Also computing something like 
`\(\mathbf{X^\top}\mathbf{X}\)`:
-
-    val XtX = X.t %*% X
-    
-will not therefore incur any additional data copying.
-
-#### Decompositions
-
-Matrix decompositions require an additional import:
-
-    import org.apache.mahout.math.decompositions._
-
-
-All arguments in the following are matrices.
-
-**Cholesky decomposition**
-
-    val ch = chol(M)
-    
-**SVD**
-
-    val (U, V, s) = svd(M)
-    
-**EigenDecomposition**
-
-    val (V, d) = eigen(M)
-    
-**QR decomposition**
-
-    val (Q, R) = qr(M)
-    
-**Rank**: Check for rank deficiency (runs rank-revealing QR)
-
-    M.isFullRank
-   
-**In-core SSVD**
-
-    val (U, V, s) = ssvd(A, k = 50, p = 15, q = 1)
-    
-**Solving linear equation systems and matrix inversion:** fully similar to R 
semantics; there are three forms of invocation:
-
-
-Solve `\(\mathbf{AX}=\mathbf{B}\)`:
-
-    solve(A, B)
-   
-Solve `\(\mathbf{Ax}=\mathbf{b}\)`:
-  
-    solve(A, b)
-   
-Compute `\(\mathbf{A^{-1}}\)`:
-
-    solve(A)
-   
-#### Misc
-
-Vector cardinality:
-
-    a.length
-    
-Matrix cardinality:
-
-    m.nrow
-    m.ncol
-    
-Means and sums:
-
-    m.colSums
-    m.colMeans
-    m.rowSums
-    m.rowMeans
-    
-Copy-By-Value:
-
-    val b = a cloned
-    
-#### Random Matrices
-
-`\(\mathcal{U}\)`(0,1) random matrix view:
-
-    val incCoreA = Matrices.uniformView(m, n, seed)
-
-    
-`\(\mathcal{U}\)`(-1,1) random matrix view:
-
-    val incCoreA = Matrices.symmetricUniformView(m, n, seed)
-
-`\(\mathcal{N}\)`(-1,1) random matrix view:
-
-    val incCoreA = Matrices.gaussianView(m, n, seed)
-    
-#### Iterators 
-
-Mahout-Math already exposes a number of iterators.  Scala code just needs the 
following imports to enable implicit conversions to scala iterators.
-
-    import collection._
-    import JavaConversions._
-    
-Iterating over rows in a Matrix:
-
-    for (row <- m) {
-      ... do something with row
-    }
-    
-<!--Iterating over non-zero and all elements of a vector:
-*Note that Vector.Element also has some implicit syntactic sugar, e.g. to add 
5.0 to every non-zero element of a matrix, the following code may be used:*
-
-    for (row <- m; el <- row.nonZero) el = 5.0 + el
-    ... or 
-    for (row <- m; el <- row.nonZero) el := 5.0 + el
-    
-Similarly **row.all** produces an iterator over all elements in a row 
(Vector). 
--->
-
-For more information including information on Mahout-Samsara's out-of-core 
Linear algebra bindings see: [Mahout Scala Bindings and Mahout Spark Bindings 
for Linear Algebra 
Subroutines](http://mahout.apache.org/users/sparkbindings/ScalaSparkBindings.pdf)
-
-

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/docs/0.13.0/mahout-samsara/out-of-core-reference.md
----------------------------------------------------------------------
diff --git a/website/_pages/docs/0.13.0/mahout-samsara/out-of-core-reference.md 
b/website/_pages/docs/0.13.0/mahout-samsara/out-of-core-reference.md
deleted file mode 100644
index c6cdddc..0000000
--- a/website/_pages/docs/0.13.0/mahout-samsara/out-of-core-reference.md
+++ /dev/null
@@ -1,316 +0,0 @@
----
-layout: mahoutdoc
-title: Mahout Samsara Out of Core
-permalink: /docs/0.13.0/mahout-samsara/outofcore/
----
-# Mahout-Samsara's Distributed Linear Algebra DSL Reference
-
-**Note: this page is meant only as a quick reference to Mahout-Samsara's 
R-Like DSL semantics.  For more information, including information on 
Mahout-Samsara's Algebraic Optimizer please see: [Mahout Scala Bindings and 
Mahout Spark Bindings for Linear Algebra 
Subroutines](http://mahout.apache.org/users/sparkbindings/ScalaSparkBindings.pdf).**
-
-The subjects of this reference are solely applicable to Mahout-Samsara's 
**DRM** (distributed row matrix).
-
-In this reference, DRMs will be denoted as e.g. `A`, and in-core matrices as 
e.g. `inCoreA`.
-
-#### Imports 
-
-The following imports are used to enable seamless in-core and distributed 
algebraic DSL operations:
-
-    import org.apache.mahout.math._
-    import scalabindings._
-    import RLikeOps._
-    import drm._
-    import RLikeDRMOps._
-    
-If working with mixed scala/java code:
-    
-    import collection._
-    import JavaConversions._
-    
-If you are working with Mahout-Samsara's Spark-specific operations e.g. for 
context creation:
-
-    import org.apache.mahout.sparkbindings._
-    
-The Mahout shell does all of these imports automatically.
-
-
-#### DRM Persistence operators
-
-**Mahout-Samsara's DRM persistence to HDFS is compatible with all 
Mahout-MapReduce algorithms such as seq2sparse.**
-
-
-Loading a DRM from (HD)FS:
-
-    drmDfsRead(path = hdfsPath)
-     
-Parallelizing from an in-core matrix:
-
-    val inCoreA = dense((1, 2, 3), (3, 4, 5))
-    val A = drmParallelize(inCoreA)
-    
-Creating an empty DRM:
-
-    val A = drmParallelizeEmpty(100, 50)
-    
-Collecting to driver's jvm in-core:
-
-    val inCoreA = A.collect
-    
-**Warning: The collection of distributed matrices happens implicitly whenever 
conversion to an in-core (o.a.m.math.Matrix) type is required. E.g.:**
-
-    val inCoreA: Matrix = ...
-    val drmB: DrmLike[Int] =...
-    val inCoreC: Matrix = inCoreA %*%: drmB
-    
-**implies (incoreA %*%: drmB).collect**
-
-Collecting to (HD)FS as a Mahout's DRM formatted file:
-
-    A.dfsWrite(path = hdfsPath)
-    
-#### Logical algebraic operators on DRM matrices:
-
-A logical set of operators are defined for distributed matrices as a subset of 
those defined for in-core matrices.  In particular, since all distributed 
matrices are immutable, there are no assignment operators (e.g. **A += B**)
-*Note: please see: [Mahout Scala Bindings and Mahout Spark Bindings for Linear 
Algebra 
Subroutines](http://mahout.apache.org/users/sparkbindings/ScalaSparkBindings.pdf)
 for information on Mahout-Samsara's Algebraic Optimizer, and translation from 
logical operations to a physical plan for the back end.*
- 
-    
-Cache a DRM and trigger an optimized physical plan: 
-
-    drmA.checkpoint(CacheHint.MEMORY_AND_DISK)
-   
-Other valid caching Instructions:
-
-    drmA.checkpoint(CacheHint.NONE)
-    drmA.checkpoint(CacheHint.DISK_ONLY)
-    drmA.checkpoint(CacheHint.DISK_ONLY_2)
-    drmA.checkpoint(CacheHint.MEMORY_ONLY)
-    drmA.checkpoint(CacheHint.MEMORY_ONLY_2)
-    drmA.checkpoint(CacheHint.MEMORY_ONLY_SER)
-    drmA.checkpoint(CacheHint.MEMORY_ONLY_SER_2)
-    drmA.checkpoint(CacheHint.MEMORY_AND_DISK_2)
-    drmA.checkpoint(CacheHint.MEMORY_AND_DISK_SER)
-    drmA.checkpoint(CacheHint.MEMORY_AND_DISK_SER_2)
-
-*Note: Logical DRM operations are lazily computed.  Currently the actual 
computations and optional caching will be triggered by dfsWrite(...), 
collect(...) and blockify(...).*
-
-
-
-Transposition:
-
-    A.t
- 
-Elementwise addition *(Matrices of identical geometry and row key types)*:
-  
-    A + B
-
-Elementwise subtraction *(Matrices of identical geometry and row key types)*:
-
-    A - B
-    
-Elementwise multiplication (Hadamard) *(Matrices of identical geometry and row 
key types)*:
-
-    A * B
-    
-Elementwise division *(Matrices of identical geometry and row key types)*:
-
-    A / B
-    
-**Elementwise operations involving one in-core argument (int-keyed DRMs 
only)**:
-
-    A + inCoreB
-    A - inCoreB
-    A * inCoreB
-    A / inCoreB
-    A :+ inCoreB
-    A :- inCoreB
-    A :* inCoreB
-    A :/ inCoreB
-    inCoreA +: B
-    inCoreA -: B
-    inCoreA *: B
-    inCoreA /: B
-
-Note the Spark associativity change (e.g. `A *: inCoreB` means 
`B.leftMultiply(A)`, same as when both arguments are in core). Whenever 
operator arguments include both in-core and out-of-core arguments, the operator 
can only be associated with the out-of-core (DRM) argument to support the 
distributed implementation.
-    
-**Matrix-matrix multiplication %*%**:
-
-`\(\mathbf{M}=\mathbf{AB}\)`
-
-    A %*% B
-    A %*% inCoreB
-    A %*% inCoreDiagonal
-    A %*%: B
-
-
-*Note: same as above, whenever operator arguments include both in-core and 
out-of-core arguments, the operator can only be associated with the out-of-core 
(DRM) argument to support the distributed implementation.*
- 
-**Matrix-vector multiplication %*%**
-Currently we support a right multiply product of a DRM and an in-core 
Vector(`\(\mathbf{Ax}\)`) resulting in a single column DRM, which then can be 
collected in front (usually the desired outcome):
-
-    val Ax = A %*% x
-    val inCoreX = Ax.collect(::, 0)
-    
-
-**Matrix-scalar +,-,*,/**
-Elementwise operations of every matrix element and a scalar:
-
-    A + 5.0
-    A - 5.0
-    A :- 5.0
-    5.0 -: A
-    A * 5.0
-    A / 5.0
-    5.0 /: A
-    
-Note that `5.0 -: A` means `\(m_{ij} = 5 - a_{ij}\)` and `5.0 /: A` means 
`\(m_{ij} = \frac{5}{a_{ij}}\)` for all elements of the result.
-    
-    
-#### Slicing
-
-General slice:
-
-    A(100 to 200, 100 to 200)
-    
-Horizontal Block:
-
-    A(::, 100 to 200)
-    
-Vertical Block:
-
-    A(100 to 200, ::)
-    
-*Note: if row range is not all-range (::) then the DRM must be `Int`-keyed.  
General case row slicing is not supported by DRMs with key types other than 
`Int`*.
-
-
-#### Stitching
-
-Stitch side by side (cbind R semantics):
-
-    val drmAnextToB = drmA cbind drmB
-    
-Stitch side by side (Scala):
-
-    val drmAnextToB = drmA.cbind(drmB)
-    
-Analogously, vertical concatenation is available via **rbind**
-
-#### Custom pipelines on blocks
-Internally, Mahout-Samsara's DRM is represented as a distributed set of 
vertical (Key, Block) tuples.
-
-**drm.mapBlock(...)**:
-
-The DRM operator `mapBlock` provides transformational access to the 
distributed vertical blockified tuples of a matrix (Row-Keys, 
Vertical-Matrix-Block).
-
-Using `mapBlock` to add 1.0 to a DRM:
-
-    val inCoreA = dense((1, 2, 3), (2, 3 , 4), (3, 4, 5))
-    val drmA = drmParallelize(inCoreA)
-    val B = drmA.mapBlock() {
-        case (keys, block) => keys -> (block += 1.0)
-    }
-    
-#### Broadcasting Vectors and matrices to closures
-Generally we can create and use one-way closure attributes to be used on the 
back end.
-
-Scalar matrix multiplication:
-
-    val factor: Int = 15
-    val drm2 = drm1.mapBlock() {
-        case (keys, block) => block *= factor
-        keys -> block
-    }
-
-**Closure attributes must be java-serializable. Currently Mahout's in-core 
Vectors and Matrices are not java-serializable, and must be broadcast to the 
closure using `drmBroadcast(...)`**:
-
-    val v: Vector ...
-    val bcastV = drmBroadcast(v)
-    val drm2 = drm1.mapBlock() {
-        case (keys, block) =>
-            for(row <- 0 until block.nrow) block(row, ::) -= bcastV
-        keys -> block    
-    }
-
-#### Computations providing ad-hoc summaries
-
-
-Matrix cardinality:
-
-    drmA.nrow
-    drmA.ncol
-
-*Note: depending on the stage of optimization, these may trigger a 
computational action.  I.e. if one calls `nrow()` n times, then the back end 
will actually recompute `nrow` n times.*
-    
-Means and sums:
-
-    drmA.colSums
-    drmA.colMeans
-    drmA.rowSums
-    drmA.rowMeans
-    
- 
-*Note: These will always trigger a computational action.  I.e. if one calls 
`colSums()` n times, then the back end will actually recompute `colSums` n 
times.*
-
-#### Distributed Matrix Decompositions
-
-To import the decomposition package:
-    
-    import org.apache.mahout.math._
-    import decompositions._
-    
-Distributed thin QR:
-
-    val (drmQ, incoreR) = dqrThin(drmA)
-    
-Distributed SSVD:
- 
-    val (drmU, drmV, s) = dssvd(drmA, k = 40, q = 1)
-    
-Distributed SPCA:
-
-    val (drmU, drmV, s) = dspca(drmA, k = 30, q = 1)
-
-Distributed regularized ALS:
-
-    val (drmU, drmV, i) = dals(drmA,
-                            k = 50,
-                            lambda = 0.0,
-                            maxIterations = 10,
-                            convergenceThreshold = 0.10)
-                            
-#### Adjusting parallelism of computations
-
-Set the minimum parallelism to 100 for computations on `drmA`:
-
-    drmA.par(min = 100)
- 
-Set the exact parallelism to 100 for computations on `drmA`:
-
-    drmA.par(exact = 100)
-
-
-Set the engine specific automatic parallelism adjustment for computations on 
`drmA`:
-
-    drmA.par(auto = true)
-
-#### Retrieving the engine specific data structure backing the DRM:
-
-**A Spark RDD:**
-
-    val myRDD = drmA.checkpoint().rdd
-    
-**An H2O Frame and Key Vec:**
-
-    val myFrame = drmA.frame
-    val myKeys = drmA.keys
-    
-**A Flink DataSet:**
-
-    val myDataSet = drmA.ds
-    
-For more information including information on Mahout-Samsara's Algebraic 
Optimizer and in-core Linear algebra bindings see: [Mahout Scala Bindings and 
Mahout Spark Bindings for Linear Algebra 
Subroutines](http://mahout.apache.org/users/sparkbindings/ScalaSparkBindings.pdf)
-
-
-
-    
-
-
-

http://git-wip-us.apache.org/repos/asf/mahout/blob/a60c79e7/website/_pages/docs/0.13.0/mahout-samsara/spark-bindings.md
----------------------------------------------------------------------
diff --git a/website/_pages/docs/0.13.0/mahout-samsara/spark-bindings.md 
b/website/_pages/docs/0.13.0/mahout-samsara/spark-bindings.md
deleted file mode 100644
index 094db0c..0000000
--- a/website/_pages/docs/0.13.0/mahout-samsara/spark-bindings.md
+++ /dev/null
@@ -1,100 +0,0 @@
----
-layout: mahoutdoc
-title: Mahout Samsara Spark
-permalink: /docs/0.13.0/mahout-samsara/spark-bindings/
----
-
-# Scala & Spark Bindings:
-*Bringing algebraic semantics*
-
-## What is Scala & Spark Bindings?
-
-In short, Scala & Spark Bindings for Mahout is Scala DSL and algebraic 
optimizer of something like this (actual formula from **(d)spca**)
-        
-
-`\[\mathbf{G}=\mathbf{B}\mathbf{B}^{\top}-\mathbf{C}-\mathbf{C}^{\top}+\mathbf{s}_{q}\mathbf{s}_{q}^{\top}\boldsymbol{\xi}^{\top}\boldsymbol{\xi}\]`
-
-bound to in-core and distributed computations (currently, on Apache Spark).
-
-
-Mahout Scala & Spark Bindings expression of the above:
-
-        val g = bt.t %*% bt - c - c.t + (s_q cross s_q) * (xi dot xi)
-
-The main idea is that a scientist writing algebraic expressions could not care 
less about distributed 
-operation plans and works **entirely on the logical level** just like he or 
she would do with R.
-
-Another idea is decoupling logical expression from distributed back-end. As 
more back-ends are added, 
-this implies **"write once, run everywhere"**.
-
-The linear algebra side works with scalars, in-core vectors and matrices, and 
Mahout Distributed
-Row Matrices (DRMs).
-
-The ecosystem of operators is built in the R's image, i.e. it follows R naming 
such as %*%, 
colSums, nrow, length operating over vectors or matrices. 
-
-Important part of Spark Bindings is expression optimizer. It looks at 
expression as a whole 
-and figures out how it can be simplified, and which physical operators should 
be picked. For example,
-there are currently about 5 different physical operators performing DRM-DRM 
multiplication
-picked based on matrix geometry, distributed dataset partitioning, orientation 
etc. 
-If we count in DRM by in-core combinations, that would be another 4, i.e. 9 
total -- all of it for just 
-simple x %*% y logical notation.
-
-
-
-Please refer to the documentation for details.
-
-## Status
-
-This environment addresses mostly R-like Linear Algebra optimizations for 
-Spark, Flink and H2O.
-
-
-## Documentation
-
-* Scala and Spark bindings manual: 
[web](http://apache.github.io/mahout/doc/ScalaSparkBindings.html), 
[pdf](ScalaSparkBindings.pdf)
-* Overview blog on 0.10.x releases: 
[blog](http://www.weatheringthroughtechdays.com/2015/04/mahout-010x-first-mahout-release-as.html)
-
-## Distributed methods and solvers using Bindings
-
-* In-core ([ssvd]) and Distributed ([dssvd]) Stochastic SVD -- guinea pigs -- 
see the bindings manual
-* In-core ([spca]) and Distributed ([dspca]) Stochastic PCA -- guinea pigs -- 
see the bindings manual
-* Distributed thin QR decomposition ([dqrThin]) -- guinea pig -- see the 
bindings manual 
-* [Current list of 
algorithms](https://mahout.apache.org/users/basics/algorithms.html)
-
-[ssvd]: 
https://github.com/apache/mahout/blob/trunk/math-scala/src/main/scala/org/apache/mahout/math/scalabindings/SSVD.scala
-[spca]: 
https://github.com/apache/mahout/blob/trunk/math-scala/src/main/scala/org/apache/mahout/math/scalabindings/SSVD.scala
-[dssvd]: 
https://github.com/apache/mahout/blob/trunk/spark/src/main/scala/org/apache/mahout/sparkbindings/decompositions/DSSVD.scala
-[dspca]: 
https://github.com/apache/mahout/blob/trunk/spark/src/main/scala/org/apache/mahout/sparkbindings/decompositions/DSPCA.scala
-[dqrThin]: 
https://github.com/apache/mahout/blob/trunk/spark/src/main/scala/org/apache/mahout/sparkbindings/decompositions/DQR.scala
-
-
-## Related history of note 
-
-* CLI and Driver for Spark version of item similarity -- 
[MAHOUT-1541](https://issues.apache.org/jira/browse/MAHOUT-1541)
-* Command line interface for generalizable Spark pipelines -- 
[MAHOUT-1569](https://issues.apache.org/jira/browse/MAHOUT-1569)
-* Cooccurrence Analysis / Item-based Recommendation -- 
[MAHOUT-1464](https://issues.apache.org/jira/browse/MAHOUT-1464)
-* Spark Bindings -- 
[MAHOUT-1346](https://issues.apache.org/jira/browse/MAHOUT-1346)
-* Scala Bindings -- 
[MAHOUT-1297](https://issues.apache.org/jira/browse/MAHOUT-1297)
-* Interactive Scala & Spark Bindings Shell & Script processor -- 
[MAHOUT-1489](https://issues.apache.org/jira/browse/MAHOUT-1489)
-* OLS tutorial using Mahout shell -- 
[MAHOUT-1542](https://issues.apache.org/jira/browse/MAHOUT-1542)
-* Full abstraction of DRM apis and algorithms from a distributed engine -- 
[MAHOUT-1529](https://issues.apache.org/jira/browse/MAHOUT-1529)
-* Port Naive Bayes -- 
[MAHOUT-1493](https://issues.apache.org/jira/browse/MAHOUT-1493)
-
-## Work in progress 
-* Text-delimited files for input and output -- 
[MAHOUT-1568](https://issues.apache.org/jira/browse/MAHOUT-1568)
-<!-- * Weighted (Implicit Feedback) ALS -- 
[MAHOUT-1365](https://issues.apache.org/jira/browse/MAHOUT-1365) -->
-<!--* Data frame R-like bindings -- 
[MAHOUT-1490](https://issues.apache.org/jira/browse/MAHOUT-1490) -->
-
-* *Your issue here!*
-
-<!-- ## Stuff wanted: 
-* Data frame R-like bindings (similarly to linalg bindings)
-* Stat R-like bindings (perhaps we can just adapt to commons.math stat)
-* **BYODMs:** Bring Your Own Distributed Method on SparkBindings! 
-* In-core jBlas matrix adapter
-* In-core GPU matrix adapters -->
-
-
-
-  
\ No newline at end of file

Reply via email to