Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/resnet/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/resnet/README.html?rev=1857911&r1=1857910&r2=1857911&view=diff
==============================================================================
--- 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/resnet/README.html
 (original)
+++ 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/resnet/README.html
 Sun Apr 21 13:56:42 2019
@@ -9,7 +9,7 @@
   
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
   
-  <title>–&gt; &mdash; incubator-singa 1.1.0 documentation</title>
+  <title>name: Resnets on ImageNet SINGA version: 1.1 SINGA commit: 
45ec92d8ffc1fa1385a9307fdf07e21da939ee2f parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz license: 
Apache V2, https://github.com/facebook/fb.resnet.torch/blob/master/LICENSE 
&mdash; incubator-singa 1.1.0 documentation</title>
   
 
   
@@ -18,15 +18,21 @@
   
 
   
-
-  
+  <script type="text/javascript" 
src="../../../../../_static/js/modernizr.min.js"></script>
   
     
+      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../../" 
src="../../../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
+        <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
+        <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
+        <script type="text/javascript" 
src="../../../../../_static/language_data.js"></script>
+    
+    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
 
-  
+    
 
   
-    <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
+  <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../../search.html" />
@@ -42,21 +48,16 @@
        }
     </style>
 
-
-  
-  <script src="../../../../../_static/js/modernizr.min.js"></script>
-
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
-
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search">
+        <div class="wy-side-nav-search" >
           
 
           
@@ -161,7 +162,11 @@
     
       <li><a href="../../../../../index.html">Docs</a> &raquo;</li>
         
-      <li>–&gt;</li>
+      <li>name: Resnets on ImageNet
+SINGA version: 1.1
+SINGA commit: 45ec92d8ffc1fa1385a9307fdf07e21da939ee2f
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz
+license: Apache V2, 
https://github.com/facebook/fb.resnet.torch/blob/master/LICENSE</li>
     
     
       <li class="wy-breadcrumbs-aside">
@@ -185,44 +190,43 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
+    with the License.  You may obtain a copy of the License at
 
-<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
-<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
-<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
-<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
-<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
-<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
-</pre></div>
-</div>
-<div class="section" id="">
-<span id="id1"></span><h1>–&gt;<a class="headerlink" href="#" 
title="Permalink to this headline">¶</a></h1>
-<p>name: Resnets on ImageNet
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+--><hr class="docutils" />
+<div class="section" 
id="name-resnets-on-imagenet-singa-version-1-1-singa-commit-45ec92d8ffc1fa1385a9307fdf07e21da939ee2f-parameter-url-https-s3-ap-southeast-1-amazonaws-com-dlfile-resnet-resnet-18-tar-gz-license-apache-v2-https-github-com-facebook-fb-resnet-torch-blob-master-license">
+<h1>name: Resnets on ImageNet
 SINGA version: 1.1
 SINGA commit: 45ec92d8ffc1fa1385a9307fdf07e21da939ee2f
 parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz
-license: Apache V2, 
https://github.com/facebook/fb.resnet.torch/blob/master/LICENSE</p>
+license: Apache V2, 
https://github.com/facebook/fb.resnet.torch/blob/master/LICENSE<a 
class="headerlink" 
href="#name-resnets-on-imagenet-singa-version-1-1-singa-commit-45ec92d8ffc1fa1385a9307fdf07e21da939ee2f-parameter-url-https-s3-ap-southeast-1-amazonaws-com-dlfile-resnet-resnet-18-tar-gz-license-apache-v2-https-github-com-facebook-fb-resnet-torch-blob-master-license"
 title="Permalink to this headline">¶</a></h1>
 </div>
-<hr class="docutils" />
 <div class="section" id="image-classification-using-residual-networks">
-<span id="image-classification-using-residual-networks"></span><h1>Image 
Classification using Residual Networks<a class="headerlink" 
href="#image-classification-using-residual-networks" title="Permalink to this 
headline">¶</a></h1>
+<h1>Image Classification using Residual Networks<a class="headerlink" 
href="#image-classification-using-residual-networks" title="Permalink to this 
headline">¶</a></h1>
 <p>In this example, we convert Residual Networks trained on <a 
class="reference external" 
href="https://github.com/facebook/fb.resnet.torch";>Torch</a> to SINGA for image 
classification.</p>
 <div class="section" id="instructions">
-<span id="instructions"></span><h2>Instructions<a class="headerlink" 
href="#instructions" title="Permalink to this headline">¶</a></h2>
+<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to 
this headline">¶</a></h2>
 <ul>
-<li><p class="first">Download one parameter checkpoint file (see below) and 
the synset word file of ImageNet into this folder, e.g.,</p>
+<li><p>Download one parameter checkpoint file (see below) and the synset word 
file of ImageNet into this folder, e.g.,</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ wget 
https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz
   $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/synset_words.txt
   $ tar xvf resnet-18.tar.gz
 </pre></div>
 </div>
 </li>
-<li><p class="first">Usage</p>
+<li><p>Usage</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ python serve.py -h
 </pre></div>
 </div>
 </li>
-<li><p class="first">Example</p>
+<li><p>Example</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  # use cpu
   $ python serve.py --use_cpu --parameter_file resnet-18.pickle --model resnet 
--depth 18 &amp;
   # use gpu
@@ -231,13 +235,13 @@ license: Apache V2, https://github.com/f
 </div>
 <p>The parameter files for the following model and depth configuration pairs 
are provided:</p>
 <ul class="simple">
-<li>resnet (original resnet), <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz";>18</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-34.tar.gz";>34</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-101.tar.gz";>101</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-152.tar.gz";>152</a></li>
-<li>addbn (resnet with a batch normalization layer after the addition), <a 
class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-50.tar.gz";>50</a></li>
-<li>wrn (wide resnet), <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/wrn-50-2.tar.gz";>50</a></li>
-<li>preact (resnet with pre-activation) <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-200.tar.gz";>200</a></li>
+<li><p>resnet (original resnet), <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz";>18</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-34.tar.gz";>34</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-101.tar.gz";>101</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-152.tar.gz";>152</a></p></li>
+<li><p>addbn (resnet with a batch normalization layer after the addition), <a 
class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-50.tar.gz";>50</a></p></li>
+<li><p>wrn (wide resnet), <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/wrn-50-2.tar.gz";>50</a></p></li>
+<li><p>preact (resnet with pre-activation) <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-200.tar.gz";>200</a></p></li>
 </ul>
 </li>
-<li><p class="first">Submit images for classification</p>
+<li><p>Submit images for classification</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ curl -i -F [email protected] 
http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
@@ -248,7 +252,7 @@ license: Apache V2, https://github.com/f
 <p>image1.jpg, image2.jpg and image3.jpg should be downloaded before executing 
the above commands.</p>
 </div>
 <div class="section" id="details">
-<span id="details"></span><h2>Details<a class="headerlink" href="#details" 
title="Permalink to this headline">¶</a></h2>
+<h2>Details<a class="headerlink" href="#details" title="Permalink to this 
headline">¶</a></h2>
 <p>The parameter files were extracted from the original <a class="reference 
external" 
href="https://github.com/facebook/fb.resnet.torch/tree/master/pretrained";>torch 
files</a> via
 the convert.py program.</p>
 <p>Usage:</p>
@@ -269,7 +273,7 @@ the convert.py program.</p>
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
 
     </p>
   </div>
@@ -286,36 +290,17 @@ the convert.py program.</p>
   
 
 
-  
-
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../../../',
-            VERSION:'1.1.0',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
-      <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
-      <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
-
-  
-
-  
-  
-    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
-  
-
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
+  
+  
+    
+  
+
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/vgg/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/vgg/README.html?rev=1857911&r1=1857910&r2=1857911&view=diff
==============================================================================
--- 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/vgg/README.html 
(original)
+++ 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/vgg/README.html 
Sun Apr 21 13:56:42 2019
@@ -9,7 +9,7 @@
   
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
   
-  <title>–&gt; &mdash; incubator-singa 1.1.0 documentation</title>
+  <title>name: VGG models on ImageNet SINGA version: 1.1.1 SINGA commit: 
license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py &mdash; 
incubator-singa 1.1.0 documentation</title>
   
 
   
@@ -18,15 +18,21 @@
   
 
   
-
-  
+  <script type="text/javascript" 
src="../../../../../_static/js/modernizr.min.js"></script>
   
     
+      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../../" 
src="../../../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
+        <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
+        <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
+        <script type="text/javascript" 
src="../../../../../_static/language_data.js"></script>
+    
+    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
 
-  
+    
 
   
-    <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
+  <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../../search.html" />
@@ -42,21 +48,16 @@
        }
     </style>
 
-
-  
-  <script src="../../../../../_static/js/modernizr.min.js"></script>
-
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
-
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search">
+        <div class="wy-side-nav-search" >
           
 
           
@@ -161,7 +162,10 @@
     
       <li><a href="../../../../../index.html">Docs</a> &raquo;</li>
         
-      <li>–&gt;</li>
+      <li>name: VGG models on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py</li>
     
     
       <li class="wy-breadcrumbs-aside">
@@ -185,44 +189,43 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
+    with the License.  You may obtain a copy of the License at
 
-<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
-<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
-<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
-<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
-<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
-<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
-</pre></div>
-</div>
-<div class="section" id="">
-<span id="id1"></span><h1>–&gt;<a class="headerlink" href="#" 
title="Permalink to this headline">¶</a></h1>
-<p>name: VGG models on ImageNet
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+--><hr class="docutils" />
+<div class="section" 
id="name-vgg-models-on-imagenet-singa-version-1-1-1-singa-commit-license-https-github-com-pytorch-vision-blob-master-torchvision-models-vgg-py">
+<h1>name: VGG models on ImageNet
 SINGA version: 1.1.1
 SINGA commit:
-license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py</p>
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py<a 
class="headerlink" 
href="#name-vgg-models-on-imagenet-singa-version-1-1-1-singa-commit-license-https-github-com-pytorch-vision-blob-master-torchvision-models-vgg-py"
 title="Permalink to this headline">¶</a></h1>
 </div>
-<hr class="docutils" />
 <div class="section" id="image-classification-using-vgg">
-<span id="image-classification-using-vgg"></span><h1>Image Classification 
using VGG<a class="headerlink" href="#image-classification-using-vgg" 
title="Permalink to this headline">¶</a></h1>
+<h1>Image Classification using VGG<a class="headerlink" 
href="#image-classification-using-vgg" title="Permalink to this 
headline">¶</a></h1>
 <p>In this example, we convert VGG on <a class="reference external" 
href="https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py";>PyTorch</a>
 to SINGA for image classification.</p>
 <div class="section" id="instructions">
-<span id="instructions"></span><h2>Instructions<a class="headerlink" 
href="#instructions" title="Permalink to this headline">¶</a></h2>
+<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to 
this headline">¶</a></h2>
 <ul>
-<li><p class="first">Download one parameter checkpoint file (see below) and 
the synset word file of ImageNet into this folder, e.g.,</p>
+<li><p>Download one parameter checkpoint file (see below) and the synset word 
file of ImageNet into this folder, e.g.,</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ wget 
https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg11.tar.gz
   $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/synset_words.txt
   $ tar xvf vgg11.tar.gz
 </pre></div>
 </div>
 </li>
-<li><p class="first">Usage</p>
+<li><p>Usage</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ python serve.py -h
 </pre></div>
 </div>
 </li>
-<li><p class="first">Example</p>
+<li><p>Example</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  # use cpu
   $ python serve.py --use_cpu --parameter_file vgg11.pickle --depth 11 &amp;
   # use gpu
@@ -231,11 +234,11 @@ to SINGA for image classification.</p>
 </div>
 <p>The parameter files for the following model and depth configuration pairs 
are provided:</p>
 <ul class="simple">
-<li>Without batch-normalization, <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg11.tar.gz";>11</a>, 
<a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg13.tar.gz";>13</a>, 
<a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg16.tar.gz";>16</a>, 
<a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg19.tar.gz";>19</a></li>
-<li>With batch-normalization, <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg11_bn.tar.gz";>11</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg13_bn.tar.gz";>13</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg16_bn.tar.gz";>16</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg19_bn.tar.gz";>19</a></li>
+<li><p>Without batch-normalization, <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg11.tar.gz";>11</a>, 
<a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg13.tar.gz";>13</a>, 
<a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg16.tar.gz";>16</a>, 
<a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg19.tar.gz";>19</a></p></li>
+<li><p>With batch-normalization, <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg11_bn.tar.gz";>11</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg13_bn.tar.gz";>13</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg16_bn.tar.gz";>16</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg19_bn.tar.gz";>19</a></p></li>
 </ul>
 </li>
-<li><p class="first">Submit images for classification</p>
+<li><p>Submit images for classification</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ curl -i -F [email protected] 
http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
@@ -246,7 +249,7 @@ to SINGA for image classification.</p>
 <p>image1.jpg, image2.jpg and image3.jpg should be downloaded before executing 
the above commands.</p>
 </div>
 <div class="section" id="details">
-<span id="details"></span><h2>Details<a class="headerlink" href="#details" 
title="Permalink to this headline">¶</a></h2>
+<h2>Details<a class="headerlink" href="#details" title="Permalink to this 
headline">¶</a></h2>
 <p>The parameter files were converted from the pytorch via the convert.py 
program.</p>
 <p>Usage:</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>$ python convert.py -h
@@ -266,7 +269,7 @@ to SINGA for image classification.</p>
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
 
     </p>
   </div>
@@ -283,36 +286,17 @@ to SINGA for image classification.</p>
   
 
 
-  
-
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../../../',
-            VERSION:'1.1.0',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
-      <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
-      <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
-
-  
-
-  
-  
-    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
-  
-
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
+  
+  
+    
+  
+
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

Modified: incubator/singa/site/trunk/en/docs/model_zoo/examples/index.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/index.html?rev=1857911&r1=1857910&r2=1857911&view=diff
==============================================================================
--- incubator/singa/site/trunk/en/docs/model_zoo/examples/index.html (original)
+++ incubator/singa/site/trunk/en/docs/model_zoo/examples/index.html Sun Apr 21 
13:56:42 2019
@@ -18,15 +18,21 @@
   
 
   
-
-  
+  <script type="text/javascript" 
src="../../../_static/js/modernizr.min.js"></script>
   
     
+      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../" 
src="../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" 
src="../../../_static/jquery.js"></script>
+        <script type="text/javascript" 
src="../../../_static/underscore.js"></script>
+        <script type="text/javascript" 
src="../../../_static/doctools.js"></script>
+        <script type="text/javascript" 
src="../../../_static/language_data.js"></script>
+    
+    <script type="text/javascript" src="../../../_static/js/theme.js"></script>
 
-  
+    
 
   
-    <link rel="stylesheet" href="../../../_static/css/theme.css" 
type="text/css" />
+  <link rel="stylesheet" href="../../../_static/css/theme.css" type="text/css" 
/>
   <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" 
/>
     <link rel="index" title="Index" href="../../../genindex.html" />
     <link rel="search" title="Search" href="../../../search.html" />
@@ -42,21 +48,16 @@
        }
     </style>
 
-
-  
-  <script src="../../../_static/js/modernizr.min.js"></script>
-
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
-
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search">
+        <div class="wy-side-nav-search" >
           
 
           
@@ -210,31 +211,51 @@
 </li>
 </ul>
 </li>
-<li class="toctree-l1"><a class="reference internal" 
href="imagenet/densenet/README.html">–&gt;</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="imagenet/densenet/README.html">name: DenseNet models on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py</a></li>
 <li class="toctree-l1"><a class="reference internal" 
href="imagenet/densenet/README.html#image-classification-using-densenet">Image 
Classification using DenseNet</a><ul>
 <li class="toctree-l2"><a class="reference internal" 
href="imagenet/densenet/README.html#instructions">Instructions</a></li>
 <li class="toctree-l2"><a class="reference internal" 
href="imagenet/densenet/README.html#details">Details</a></li>
 </ul>
 </li>
-<li class="toctree-l1"><a class="reference internal" 
href="imagenet/googlenet/README.html">–&gt;</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="imagenet/googlenet/README.html">name: GoogleNet on ImageNet
+SINGA version: 1.0.1
+SINGA commit: 8c990f7da2de220e8a012c6a8ecc897dc7532744
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz
+parameter_sha1: 0a88e8948b1abca3badfd8d090d6be03f8d7655d
+license: unrestricted 
https://github.com/BVLC/caffe/tree/master/models/bvlc_googlenet</a></li>
 <li class="toctree-l1"><a class="reference internal" 
href="imagenet/googlenet/README.html#image-classification-using-googlenet">Image
 Classification using GoogleNet</a><ul>
 <li class="toctree-l2"><a class="reference internal" 
href="imagenet/googlenet/README.html#instructions">Instructions</a></li>
 <li class="toctree-l2"><a class="reference internal" 
href="imagenet/googlenet/README.html#details">Details</a></li>
 </ul>
 </li>
-<li class="toctree-l1"><a class="reference internal" 
href="imagenet/inception/README.html">–&gt;</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="imagenet/inception/README.html">name: Inception V4 on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/inception_v4.tar.gz
+parameter_sha1: 5fdd6f5d8af8fd10e7321d9b38bb87ef14e80d56
+license: https://github.com/tensorflow/models/tree/master/slim</a></li>
 <li class="toctree-l1"><a class="reference internal" 
href="imagenet/inception/README.html#image-classification-using-inception-v4">Image
 Classification using Inception V4</a><ul>
 <li class="toctree-l2"><a class="reference internal" 
href="imagenet/inception/README.html#instructions">Instructions</a></li>
 <li class="toctree-l2"><a class="reference internal" 
href="imagenet/inception/README.html#details">Details</a></li>
 </ul>
 </li>
-<li class="toctree-l1"><a class="reference internal" 
href="imagenet/resnet/README.html">–&gt;</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="imagenet/resnet/README.html">name: Resnets on ImageNet
+SINGA version: 1.1
+SINGA commit: 45ec92d8ffc1fa1385a9307fdf07e21da939ee2f
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz
+license: Apache V2, 
https://github.com/facebook/fb.resnet.torch/blob/master/LICENSE</a></li>
 <li class="toctree-l1"><a class="reference internal" 
href="imagenet/resnet/README.html#image-classification-using-residual-networks">Image
 Classification using Residual Networks</a><ul>
 <li class="toctree-l2"><a class="reference internal" 
href="imagenet/resnet/README.html#instructions">Instructions</a></li>
 <li class="toctree-l2"><a class="reference internal" 
href="imagenet/resnet/README.html#details">Details</a></li>
 </ul>
 </li>
-<li class="toctree-l1"><a class="reference internal" 
href="imagenet/vgg/README.html">–&gt;</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="imagenet/vgg/README.html">name: VGG models on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py</a></li>
 <li class="toctree-l1"><a class="reference internal" 
href="imagenet/vgg/README.html#image-classification-using-vgg">Image 
Classification using VGG</a><ul>
 <li class="toctree-l2"><a class="reference internal" 
href="imagenet/vgg/README.html#instructions">Instructions</a></li>
 <li class="toctree-l2"><a class="reference internal" 
href="imagenet/vgg/README.html#details">Details</a></li>
@@ -255,7 +276,7 @@
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
 
     </p>
   </div>
@@ -272,36 +293,17 @@
   
 
 
-  
-
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../',
-            VERSION:'1.1.0',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" src="../../../_static/jquery.js"></script>
-      <script type="text/javascript" 
src="../../../_static/underscore.js"></script>
-      <script type="text/javascript" 
src="../../../_static/doctools.js"></script>
-
-  
-
-  
-  
-    <script type="text/javascript" src="../../../_static/js/theme.js"></script>
-  
-
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
+  
+  
+    
+  
+
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/examples/mnist/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/mnist/README.html?rev=1857911&r1=1857910&r2=1857911&view=diff
==============================================================================
--- incubator/singa/site/trunk/en/docs/model_zoo/examples/mnist/README.html 
(original)
+++ incubator/singa/site/trunk/en/docs/model_zoo/examples/mnist/README.html Sun 
Apr 21 13:56:42 2019
@@ -18,15 +18,21 @@
   
 
   
-
-  
+  <script type="text/javascript" 
src="../../../../_static/js/modernizr.min.js"></script>
   
     
+      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../" 
src="../../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/jquery.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/underscore.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/doctools.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/language_data.js"></script>
+    
+    <script type="text/javascript" 
src="../../../../_static/js/theme.js"></script>
 
-  
+    
 
   
-    <link rel="stylesheet" href="../../../../_static/css/theme.css" 
type="text/css" />
+  <link rel="stylesheet" href="../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../search.html" />
@@ -42,21 +48,16 @@
        }
     </style>
 
-
-  
-  <script src="../../../../_static/js/modernizr.min.js"></script>
-
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
-
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search">
+        <div class="wy-side-nav-search" >
           
 
           
@@ -185,28 +186,26 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
+    with the License.  You may obtain a copy of the License at
 
-<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
-<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
-<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
-<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
-<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
-<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
-</pre></div>
-</div>
-<p>–&gt;</p>
-<div class="section" id="train-a-rbm-model-against-mnist-dataset">
-<span id="train-a-rbm-model-against-mnist-dataset"></span><h1>Train a RBM 
model against MNIST dataset<a class="headerlink" 
href="#train-a-rbm-model-against-mnist-dataset" title="Permalink to this 
headline">¶</a></h1>
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+--><div class="section" id="train-a-rbm-model-against-mnist-dataset">
+<h1>Train a RBM model against MNIST dataset<a class="headerlink" 
href="#train-a-rbm-model-against-mnist-dataset" title="Permalink to this 
headline">¶</a></h1>
 <p>This example is to train an RBM model using the
 MNIST dataset. The RBM model and its hyper-parameters are set following
-<a class="reference external" 
href="http://www.cs.toronto.edu/~hinton/science.pdf";>Hinton’s paper</a></p>
+<a class="reference external" 
href="http://www.cs.toronto.edu/%7Ehinton/science.pdf";>Hinton’s paper</a></p>
 <div class="section" id="running-instructions">
-<span id="running-instructions"></span><h2>Running instructions<a 
class="headerlink" href="#running-instructions" title="Permalink to this 
headline">¶</a></h2>
+<h2>Running instructions<a class="headerlink" href="#running-instructions" 
title="Permalink to this headline">¶</a></h2>
 <ol>
-<li><p class="first">Download the pre-processed <a class="reference external" 
href="https://github.com/mnielsen/neural-networks-and-deep-learning/raw/master/data/mnist.pkl.gz";>MNIST
 dataset</a></p>
-</li>
-<li><p class="first">Start the training</p>
+<li><p>Download the pre-processed <a class="reference external" 
href="https://github.com/mnielsen/neural-networks-and-deep-learning/raw/master/data/mnist.pkl.gz";>MNIST
 dataset</a></p></li>
+<li><p>Start the training</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span> <span class="n">python</span> <span 
class="n">train</span><span class="o">.</span><span class="n">py</span> <span 
class="n">mnist</span><span class="o">.</span><span class="n">pkl</span><span 
class="o">.</span><span class="n">gz</span>
 </pre></div>
 </div>
@@ -231,7 +230,7 @@ the program with an additional argument<
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
 
     </p>
   </div>
@@ -248,36 +247,17 @@ the program with an additional argument<
   
 
 
-  
-
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../../',
-            VERSION:'1.1.0',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" 
src="../../../../_static/jquery.js"></script>
-      <script type="text/javascript" 
src="../../../../_static/underscore.js"></script>
-      <script type="text/javascript" 
src="../../../../_static/doctools.js"></script>
-
-  
-
-  
-  
-    <script type="text/javascript" 
src="../../../../_static/js/theme.js"></script>
-  
-
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
+  
+  
+    
+  
+
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/imagenet/alexnet/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/imagenet/alexnet/README.html?rev=1857911&r1=1857910&r2=1857911&view=diff
==============================================================================
--- incubator/singa/site/trunk/en/docs/model_zoo/imagenet/alexnet/README.html 
(original)
+++ incubator/singa/site/trunk/en/docs/model_zoo/imagenet/alexnet/README.html 
Sun Apr 21 13:56:42 2019
@@ -18,19 +18,25 @@
   
 
   
-
-  
+  <script type="text/javascript" 
src="../../../../_static/js/modernizr.min.js"></script>
   
     
+      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../" 
src="../../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/jquery.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/underscore.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/doctools.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/language_data.js"></script>
+    
+    <script type="text/javascript" 
src="../../../../_static/js/theme.js"></script>
 
-  
+    
 
   
-    <link rel="stylesheet" href="../../../../_static/css/theme.css" 
type="text/css" />
+  <link rel="stylesheet" href="../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../search.html" />
-    <link rel="next" title="–&gt;" href="../densenet/README.html" />
+    <link rel="next" title="name: DenseNet models on ImageNet SINGA version: 
1.1.1 SINGA commit: license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py" 
href="../densenet/README.html" />
     <link rel="prev" title="Train a RBM model against MNIST dataset" 
href="../../mnist/README.html" />
     <link href="../../../../_static/style.css" rel="stylesheet" 
type="text/css">
     <!--link href="../../../../_static/fontawesome-all.min.css" 
rel="stylesheet" type="text/css"-->
@@ -44,21 +50,16 @@
        }
     </style>
 
-
-  
-  <script src="../../../../_static/js/modernizr.min.js"></script>
-
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
-
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search">
+        <div class="wy-side-nav-search" >
           
 
           
@@ -125,15 +126,35 @@
 <li class="toctree-l4"><a class="reference internal" 
href="#instructions">Instructions</a></li>
 </ul>
 </li>
-<li class="toctree-l3"><a class="reference internal" 
href="../densenet/README.html">–&gt;</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="../densenet/README.html">name: DenseNet models on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../densenet/README.html#image-classification-using-densenet">Image 
Classification using DenseNet</a></li>
-<li class="toctree-l3"><a class="reference internal" 
href="../googlenet/README.html">–&gt;</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="../googlenet/README.html">name: GoogleNet on ImageNet
+SINGA version: 1.0.1
+SINGA commit: 8c990f7da2de220e8a012c6a8ecc897dc7532744
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz
+parameter_sha1: 0a88e8948b1abca3badfd8d090d6be03f8d7655d
+license: unrestricted 
https://github.com/BVLC/caffe/tree/master/models/bvlc_googlenet</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../googlenet/README.html#image-classification-using-googlenet">Image 
Classification using GoogleNet</a></li>
-<li class="toctree-l3"><a class="reference internal" 
href="../inception/README.html">–&gt;</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="../inception/README.html">name: Inception V4 on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/inception_v4.tar.gz
+parameter_sha1: 5fdd6f5d8af8fd10e7321d9b38bb87ef14e80d56
+license: https://github.com/tensorflow/models/tree/master/slim</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../inception/README.html#image-classification-using-inception-v4">Image 
Classification using Inception V4</a></li>
-<li class="toctree-l3"><a class="reference internal" 
href="../resnet/README.html">–&gt;</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="../resnet/README.html">name: Resnets on ImageNet
+SINGA version: 1.1
+SINGA commit: 45ec92d8ffc1fa1385a9307fdf07e21da939ee2f
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz
+license: Apache V2, 
https://github.com/facebook/fb.resnet.torch/blob/master/LICENSE</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../resnet/README.html#image-classification-using-residual-networks">Image 
Classification using Residual Networks</a></li>
-<li class="toctree-l3"><a class="reference internal" 
href="../vgg/README.html">–&gt;</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="../vgg/README.html">name: VGG models on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../vgg/README.html#image-classification-using-vgg">Image Classification 
using VGG</a></li>
 </ul>
 </li>
@@ -230,45 +251,44 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
+    with the License.  You may obtain a copy of the License at
 
-<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
-<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
-<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
-<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
-<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
-<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
-</pre></div>
-</div>
-<p>–&gt;</p>
-<div class="section" id="train-alexnet-over-imagenet">
-<span id="train-alexnet-over-imagenet"></span><h1>Train AlexNet over 
ImageNet<a class="headerlink" href="#train-alexnet-over-imagenet" 
title="Permalink to this headline">¶</a></h1>
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+--><div class="section" id="train-alexnet-over-imagenet">
+<h1>Train AlexNet over ImageNet<a class="headerlink" 
href="#train-alexnet-over-imagenet" title="Permalink to this 
headline">¶</a></h1>
 <p>Convolution neural network (CNN) is a type of feed-forward neural
 network widely used for image and video classification. In this example, we 
will
 use a <a class="reference external" 
href="http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks";>deep
 CNN model</a>
 to do image classification against the ImageNet dataset.</p>
 <div class="section" id="instructions">
-<span id="instructions"></span><h2>Instructions<a class="headerlink" 
href="#instructions" title="Permalink to this headline">¶</a></h2>
+<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to 
this headline">¶</a></h2>
 <div class="section" id="compile-singa">
-<span id="compile-singa"></span><h3>Compile SINGA<a class="headerlink" 
href="#compile-singa" title="Permalink to this headline">¶</a></h3>
+<h3>Compile SINGA<a class="headerlink" href="#compile-singa" title="Permalink 
to this headline">¶</a></h3>
 <p>Please compile SINGA with CUDA, CUDNN and OpenCV. You can manually turn on 
the
 options in CMakeLists.txt or run <code class="docutils literal 
notranslate"><span class="pre">ccmake</span> <span class="pre">..</span></code> 
in build/ folder.</p>
 <p>We have tested CUDNN V4 and V5 (V5 requires CUDA 7.5)</p>
 </div>
 <div class="section" id="data-download">
-<span id="data-download"></span><h3>Data download<a class="headerlink" 
href="#data-download" title="Permalink to this headline">¶</a></h3>
+<h3>Data download<a class="headerlink" href="#data-download" title="Permalink 
to this headline">¶</a></h3>
 <ul class="simple">
-<li>Please refer to step1-3 on <a class="reference external" 
href="https://github.com/amd/OpenCL-caffe/wiki/Instructions-to-create-ImageNet-2012-data";>Instructions
 to create ImageNet 2012 data</a>
-to download and decompress the data.</li>
-<li>You can download the training and validation list by
+<li><p>Please refer to step1-3 on <a class="reference external" 
href="https://github.com/amd/OpenCL-caffe/wiki/Instructions-to-create-ImageNet-2012-data";>Instructions
 to create ImageNet 2012 data</a>
+to download and decompress the data.</p></li>
+<li><p>You can download the training and validation list by
 <a class="reference external" 
href="https://github.com/BVLC/caffe/blob/master/data/ilsvrc12/get_ilsvrc_aux.sh";>get_ilsvrc_aux.sh</a>
-or from <a class="reference external" 
href="http://www.image-net.org/download-images";>Imagenet</a>.</li>
+or from <a class="reference external" 
href="http://www.image-net.org/download-images";>Imagenet</a>.</p></li>
 </ul>
 </div>
 <div class="section" id="data-preprocessing">
-<span id="data-preprocessing"></span><h3>Data preprocessing<a 
class="headerlink" href="#data-preprocessing" title="Permalink to this 
headline">¶</a></h3>
+<h3>Data preprocessing<a class="headerlink" href="#data-preprocessing" 
title="Permalink to this headline">¶</a></h3>
 <ul>
-<li><p class="first">Assuming you have downloaded the data and the list.
+<li><p>Assuming you have downloaded the data and the list.
 Now we should transform the data into binary files. You can run:</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>    <span class="n">sh</span> <span 
class="n">create_data</span><span class="o">.</span><span class="n">sh</span>
 </pre></div>
@@ -276,41 +296,41 @@ Now we should transform the data into bi
 <p>The script will generate a test file (<code class="docutils literal notranslate"><span class="pre">test.bin</span></code>), a mean file (<code class="docutils literal notranslate"><span class="pre">mean.bin</span></code>) and
 several training files (<code class="docutils literal notranslate"><span class="pre">trainX.bin</span></code>) in the specified output folder.</p>
 </li>
-<li><p class="first">You can also change the parameters in <code 
class="docutils literal notranslate"><span 
class="pre">create_data.sh</span></code>.</p>
+<li><p>You can also change the parameters in <code class="docutils literal notranslate"><span class="pre">create_data.sh</span></code>; see the example invocation after this list.</p>
 <ul class="simple">
-<li><code class="docutils literal notranslate"><span 
class="pre">-trainlist</span> <span class="pre">&lt;file&gt;</span></code>: the 
file of training list;</li>
-<li><code class="docutils literal notranslate"><span 
class="pre">-trainfolder</span> <span class="pre">&lt;folder&gt;</span></code>: 
the folder of training images;</li>
-<li><code class="docutils literal notranslate"><span 
class="pre">-testlist</span> <span class="pre">&lt;file&gt;</span></code>: the 
file of test list;</li>
-<li><code class="docutils literal notranslate"><span 
class="pre">-testfolder</span> <span class="pre">&lt;floder&gt;</span></code>: 
the folder of test images;</li>
-<li><code class="docutils literal notranslate"><span 
class="pre">-outdata</span> <span class="pre">&lt;folder&gt;</span></code>: the 
folder to save output files, including mean, training and test files.
-The script will generate these files in the specified folder;</li>
-<li><code class="docutils literal notranslate"><span 
class="pre">-filesize</span> <span class="pre">&lt;int&gt;</span></code>: 
number of training images that stores in each binary file.</li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-trainlist</span> <span class="pre">&lt;file&gt;</span></code>: the 
file of training list;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-trainfolder</span> <span class="pre">&lt;folder&gt;</span></code>: 
the folder of training images;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-testlist</span> <span class="pre">&lt;file&gt;</span></code>: the 
file of test list;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-testfolder</span> <span class="pre">&lt;floder&gt;</span></code>: 
the folder of test images;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-outdata</span> <span class="pre">&lt;folder&gt;</span></code>: the 
folder to save output files, including mean, training and test files.
+The script will generate these files in the specified folder;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-filesize</span> <span class="pre">&lt;int&gt;</span></code>: 
number of training images that stores in each binary file.</p></li>
 </ul>
 </li>
 </ul>
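+<p>For instance, a full invocation could look like the following (all file names and folders below are placeholders for your own layout):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>    # paths are examples only; point them at your own copy of the ImageNet data
+    $ sh create_data.sh -trainlist train.txt -trainfolder /data/ilsvrc12/train \
+        -testlist val.txt -testfolder /data/ilsvrc12/val \
+        -outdata /data/ilsvrc12/bin -filesize 1280
+</pre></div>
+</div>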
 </div>
 <div class="section" id="training">
-<span id="training"></span><h3>Training<a class="headerlink" href="#training" 
title="Permalink to this headline">¶</a></h3>
+<h3>Training<a class="headerlink" href="#training" title="Permalink to this 
headline">¶</a></h3>
 <ul>
-<li><p class="first">After preparing data, you can run the following command 
to train the Alexnet model.</p>
+<li><p>After preparing the data, you can run the following command to train the AlexNet model.</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>    <span class="n">sh</span> <span 
class="n">run</span><span class="o">.</span><span class="n">sh</span>
 </pre></div>
 </div>
 </li>
-<li><p class="first">You may change the parameters in <code class="docutils 
literal notranslate"><span class="pre">run.sh</span></code>.</p>
+<li><p>You may change the parameters in <code class="docutils literal notranslate"><span class="pre">run.sh</span></code>; a sample invocation with these flags is sketched after this list.</p>
 <ul class="simple">
-<li><code class="docutils literal notranslate"><span class="pre">-epoch</span> 
<span class="pre">&lt;int&gt;</span></code>: number of epoch to be trained, 
default is 90;</li>
-<li><code class="docutils literal notranslate"><span class="pre">-lr</span> 
<span class="pre">&lt;float&gt;</span></code>: base learning rate, the learning 
rate will decrease each 20 epochs,
-more specifically, <code class="docutils literal notranslate"><span 
class="pre">lr</span> <span class="pre">=</span> <span class="pre">lr</span> 
<span class="pre">*</span> <span class="pre">exp(0.1</span> <span 
class="pre">*</span> <span class="pre">(epoch</span> <span class="pre">/</span> 
<span class="pre">20))</span></code>;</li>
-<li><code class="docutils literal notranslate"><span 
class="pre">-batchsize</span> <span class="pre">&lt;int&gt;</span></code>: 
batchsize, it should be changed regarding to your memory;</li>
-<li><code class="docutils literal notranslate"><span 
class="pre">-filesize</span> <span class="pre">&lt;int&gt;</span></code>: 
number of training images that stores in each binary file, it is the
-same as the <code class="docutils literal notranslate"><span 
class="pre">filesize</span></code> in data preprocessing;</li>
-<li><code class="docutils literal notranslate"><span 
class="pre">-ntrain</span> <span class="pre">&lt;int&gt;</span></code>: number 
of training images;</li>
-<li><code class="docutils literal notranslate"><span class="pre">-ntest</span> 
<span class="pre">&lt;int&gt;</span></code>: number of test images;</li>
-<li><code class="docutils literal notranslate"><span class="pre">-data</span> 
<span class="pre">&lt;folder&gt;</span></code>: the folder which stores the 
binary files, it is exactly the output
-folder in data preprocessing step;</li>
-<li><code class="docutils literal notranslate"><span class="pre">-pfreq</span> 
<span class="pre">&lt;int&gt;</span></code>: the frequency(in batch) of 
printing current model status(loss and accuracy);</li>
-<li><code class="docutils literal notranslate"><span 
class="pre">-nthreads</span> <span class="pre">&lt;int&gt;</span></code>: the 
number of threads to load data which feed to the model.</li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-epoch</span> <span class="pre">&lt;int&gt;</span></code>: number 
of epoch to be trained, default is 90;</p></li>
+<li><p><code class="docutils literal notranslate"><span class="pre">-lr</span> 
<span class="pre">&lt;float&gt;</span></code>: base learning rate, the learning 
rate will decrease each 20 epochs,
+more specifically, <code class="docutils literal notranslate"><span 
class="pre">lr</span> <span class="pre">=</span> <span class="pre">lr</span> 
<span class="pre">*</span> <span class="pre">exp(0.1</span> <span 
class="pre">*</span> <span class="pre">(epoch</span> <span class="pre">/</span> 
<span class="pre">20))</span></code>;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-batchsize</span> <span class="pre">&lt;int&gt;</span></code>: 
batchsize, it should be changed regarding to your memory;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-filesize</span> <span class="pre">&lt;int&gt;</span></code>: 
number of training images that stores in each binary file, it is the
+same as the <code class="docutils literal notranslate"><span 
class="pre">filesize</span></code> in data preprocessing;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-ntrain</span> <span class="pre">&lt;int&gt;</span></code>: number 
of training images;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-ntest</span> <span class="pre">&lt;int&gt;</span></code>: number 
of test images;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-data</span> <span class="pre">&lt;folder&gt;</span></code>: the 
folder which stores the binary files, it is exactly the output
+folder in data preprocessing step;</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-pfreq</span> <span class="pre">&lt;int&gt;</span></code>: the 
frequency(in batch) of printing current model status(loss and 
accuracy);</p></li>
+<li><p><code class="docutils literal notranslate"><span 
class="pre">-nthreads</span> <span class="pre">&lt;int&gt;</span></code>: the 
number of threads to load data which feed to the model.</p></li>
 </ul>
 </li>
 </ul>
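+<p>As a sketch, the flags above could be combined as follows; the executable name and paths are placeholders, so consult run.sh for the actual command it issues (1281167 and 50000 are the sizes of the ImageNet 2012 training and validation sets):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>    # hypothetical invocation of the training binary launched by run.sh
+    $ ./imagenet -epoch 90 -lr 0.01 -batchsize 256 -filesize 1280 \
+        -ntrain 1281167 -ntest 50000 -data /data/ilsvrc12/bin -pfreq 100 -nthreads 12
+</pre></div>
+</div>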
@@ -326,10 +346,10 @@ folder in data preprocessing step;</li>
   
     <div class="rst-footer-buttons" role="navigation" aria-label="footer 
navigation">
       
-        <a href="../densenet/README.html" class="btn btn-neutral float-right" 
title="–&gt;" accesskey="n" rel="next">Next <span class="fa 
fa-arrow-circle-right"></span></a>
+        <a href="../densenet/README.html" class="btn btn-neutral float-right" 
title="name: DenseNet models on ImageNet SINGA version: 1.1.1 SINGA commit: 
license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py"; 
accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right"></span></a>
       
       
-        <a href="../../mnist/README.html" class="btn btn-neutral" title="Train 
a RBM model against MNIST dataset" accesskey="p" rel="prev"><span class="fa 
fa-arrow-circle-left"></span> Previous</a>
+        <a href="../../mnist/README.html" class="btn btn-neutral float-left" 
title="Train a RBM model against MNIST dataset" accesskey="p" rel="prev"><span 
class="fa fa-arrow-circle-left"></span> Previous</a>
       
     </div>
   
@@ -338,7 +358,7 @@ folder in data preprocessing step;</li>
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
 
     </p>
   </div>
@@ -355,36 +375,17 @@ folder in data preprocessing step;</li>
   
 
 
-  
-
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../../',
-            VERSION:'1.1.0',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" 
src="../../../../_static/jquery.js"></script>
-      <script type="text/javascript" 
src="../../../../_static/underscore.js"></script>
-      <script type="text/javascript" 
src="../../../../_static/doctools.js"></script>
-
-  
-
-  
-  
-    <script type="text/javascript" 
src="../../../../_static/js/theme.js"></script>
-  
-
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
+  
+  
+    
+  
+
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/imagenet/densenet/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/imagenet/densenet/README.html?rev=1857911&r1=1857910&r2=1857911&view=diff
==============================================================================
--- incubator/singa/site/trunk/en/docs/model_zoo/imagenet/densenet/README.html 
(original)
+++ incubator/singa/site/trunk/en/docs/model_zoo/imagenet/densenet/README.html 
Sun Apr 21 13:56:42 2019
@@ -9,7 +9,7 @@
   
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
   
-  <title>–&gt; &mdash; incubator-singa 1.1.0 documentation</title>
+  <title>name: DenseNet models on ImageNet SINGA version: 1.1.1 SINGA commit: 
license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py 
&mdash; incubator-singa 1.1.0 documentation</title>
   
 
   
@@ -18,19 +18,25 @@
   
 
   
-
-  
+  <script type="text/javascript" 
src="../../../../_static/js/modernizr.min.js"></script>
   
     
+      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../" 
src="../../../../_static/documentation_options.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/jquery.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/underscore.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/doctools.js"></script>
+        <script type="text/javascript" 
src="../../../../_static/language_data.js"></script>
+    
+    <script type="text/javascript" 
src="../../../../_static/js/theme.js"></script>
 
-  
+    
 
   
-    <link rel="stylesheet" href="../../../../_static/css/theme.css" 
type="text/css" />
+  <link rel="stylesheet" href="../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../search.html" />
-    <link rel="next" title="–&gt;" href="../googlenet/README.html" />
+    <link rel="next" title="name: GoogleNet on ImageNet SINGA version: 1.0.1 
SINGA commit: 8c990f7da2de220e8a012c6a8ecc897dc7532744 parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz 
parameter_sha1: 0a88e8948b1abca3badfd8d090d6be03f8d7655d license: unrestricted 
https://github.com/BVLC/caffe/tree/master/models/bvlc_googlenet"; 
href="../googlenet/README.html" />
     <link rel="prev" title="Train AlexNet over ImageNet" 
href="../alexnet/README.html" />
     <link href="../../../../_static/style.css" rel="stylesheet" 
type="text/css">
     <!--link href="../../../../_static/fontawesome-all.min.css" 
rel="stylesheet" type="text/css"-->
@@ -44,21 +50,16 @@
        }
     </style>
 
-
-  
-  <script src="../../../../_static/js/modernizr.min.js"></script>
-
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
-
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search">
+        <div class="wy-side-nav-search" >
           
 
           
@@ -122,19 +123,39 @@
 <li class="toctree-l3"><a class="reference internal" 
href="../../char-rnn/README.html">Train Char-RNN over plain text</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../../mnist/README.html">Train a RBM model against MNIST dataset</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../alexnet/README.html">Train AlexNet over ImageNet</a></li>
-<li class="toctree-l3 current"><a class="current reference internal" 
href="#">–&gt;</a></li>
+<li class="toctree-l3 current"><a class="current reference internal" 
href="#">name: DenseNet models on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="#image-classification-using-densenet">Image Classification using 
DenseNet</a><ul>
 <li class="toctree-l4"><a class="reference internal" 
href="#instructions">Instructions</a></li>
 <li class="toctree-l4"><a class="reference internal" 
href="#details">Details</a></li>
 </ul>
 </li>
-<li class="toctree-l3"><a class="reference internal" 
href="../googlenet/README.html">–&gt;</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="../googlenet/README.html">name: GoogleNet on ImageNet
+SINGA version: 1.0.1
+SINGA commit: 8c990f7da2de220e8a012c6a8ecc897dc7532744
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz
+parameter_sha1: 0a88e8948b1abca3badfd8d090d6be03f8d7655d
+license: unrestricted 
https://github.com/BVLC/caffe/tree/master/models/bvlc_googlenet</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../googlenet/README.html#image-classification-using-googlenet">Image 
Classification using GoogleNet</a></li>
-<li class="toctree-l3"><a class="reference internal" 
href="../inception/README.html">–&gt;</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="../inception/README.html">name: Inception V4 on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/inception_v4.tar.gz
+parameter_sha1: 5fdd6f5d8af8fd10e7321d9b38bb87ef14e80d56
+license: https://github.com/tensorflow/models/tree/master/slim</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../inception/README.html#image-classification-using-inception-v4">Image 
Classification using Inception V4</a></li>
-<li class="toctree-l3"><a class="reference internal" 
href="../resnet/README.html">–&gt;</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="../resnet/README.html">name: Resnets on ImageNet
+SINGA version: 1.1
+SINGA commit: 45ec92d8ffc1fa1385a9307fdf07e21da939ee2f
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz
+license: Apache V2, 
https://github.com/facebook/fb.resnet.torch/blob/master/LICENSE</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../resnet/README.html#image-classification-using-residual-networks">Image 
Classification using Residual Networks</a></li>
-<li class="toctree-l3"><a class="reference internal" 
href="../vgg/README.html">–&gt;</a></li>
+<li class="toctree-l3"><a class="reference internal" 
href="../vgg/README.html">name: VGG models on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py</a></li>
 <li class="toctree-l3"><a class="reference internal" 
href="../vgg/README.html#image-classification-using-vgg">Image Classification 
using VGG</a></li>
 </ul>
 </li>
@@ -207,7 +228,10 @@
         
           <li><a href="../../index.html">Model Zoo</a> &raquo;</li>
         
-      <li>–&gt;</li>
+      <li>name: DenseNet models on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py</li>
     
     
       <li class="wy-breadcrumbs-aside">
@@ -231,44 +255,43 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
+    with the License.  You may obtain a copy of the License at
 
-<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
-<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
-<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
-<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
-<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
-<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
-</pre></div>
-</div>
-<div class="section" id="">
-<span id="id1"></span><h1>–&gt;<a class="headerlink" href="#" 
title="Permalink to this headline">¶</a></h1>
-<p>name: DenseNet models on ImageNet
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+--><hr class="docutils" />
+<div class="section" 
id="name-densenet-models-on-imagenet-singa-version-1-1-1-singa-commit-license-https-github-com-pytorch-vision-blob-master-torchvision-models-densenet-py">
+<h1>name: DenseNet models on ImageNet
 SINGA version: 1.1.1
 SINGA commit:
-license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py</p>
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py<a 
class="headerlink" 
href="#name-densenet-models-on-imagenet-singa-version-1-1-1-singa-commit-license-https-github-com-pytorch-vision-blob-master-torchvision-models-densenet-py"
 title="Permalink to this headline">¶</a></h1>
 </div>
-<hr class="docutils" />
 <div class="section" id="image-classification-using-densenet">
-<span id="image-classification-using-densenet"></span><h1>Image Classification 
using DenseNet<a class="headerlink" href="#image-classification-using-densenet" 
title="Permalink to this headline">¶</a></h1>
+<h1>Image Classification using DenseNet<a class="headerlink" 
href="#image-classification-using-densenet" title="Permalink to this 
headline">¶</a></h1>
 <p>In this example, we convert DenseNet from <a class="reference external" href="https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py";>PyTorch</a>
 to SINGA for image classification.</p>
 <div class="section" id="instructions">
-<span id="instructions"></span><h2>Instructions<a class="headerlink" 
href="#instructions" title="Permalink to this headline">¶</a></h2>
+<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to 
this headline">¶</a></h2>
 <ul>
-<li><p class="first">Download one parameter checkpoint file (see below) and 
the synset word file of ImageNet into this folder, e.g.,</p>
+<li><p>Download one parameter checkpoint file (see below) and the synset word 
file of ImageNet into this folder, e.g.,</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ wget 
https://s3-ap-southeast-1.amazonaws.com/dlfile/densenet/densenet-121.tar.gz
   $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/synset_words.txt
   $ tar xvf densenet-121.tar.gz
 </pre></div>
 </div>
 </li>
-<li><p class="first">Usage</p>
+<li><p>Usage</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ python serve.py -h
 </pre></div>
 </div>
 </li>
-<li><p class="first">Example</p>
+<li><p>Example</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  # use cpu
   $ python serve.py --use_cpu --parameter_file densenet-121.pickle --depth 121 
&amp;
   # use gpu
@@ -278,7 +301,7 @@ to SINGA for image classification.</p>
 <p>The parameter files for the following model and depth configuration pairs 
are provided:
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/densenet/densenet-121.tar.gz";>121</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/densenet/densenet-169.tar.gz";>169</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/densenet/densenet-201.tar.gz";>201</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/densenet/densenet-161.tar.gz";>161</a></p>
 </li>
-<li><p class="first">Submit images for classification</p>
+<li><p>Submit images for classification</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ curl -i -F [email protected] 
http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
@@ -289,7 +312,7 @@ to SINGA for image classification.</p>
 <p>image1.jpg, image2.jpg and image3.jpg should be downloaded before executing 
the above commands.</p>
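+<p>Any JPEG image will do for a quick test; for example (the source URL below is only a placeholder for an image of your choice):</p>
+<div class="highlight-default notranslate"><div class="highlight"><pre><span></span>  $ wget -O image1.jpg https://example.com/some_photo.jpg
+</pre></div>
+</div>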
 </div>
 <div class="section" id="details">
-<span id="details"></span><h2>Details<a class="headerlink" href="#details" 
title="Permalink to this headline">¶</a></h2>
+<h2>Details<a class="headerlink" href="#details" title="Permalink to this 
headline">¶</a></h2>
 <p>The parameter files were converted from PyTorch via the convert.py
 program.</p>
 <p>Usage:</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>$ python convert.py -h
@@ -306,10 +329,10 @@ to SINGA for image classification.</p>
   
     <div class="rst-footer-buttons" role="navigation" aria-label="footer 
navigation">
       
-        <a href="../googlenet/README.html" class="btn btn-neutral float-right" 
title="–&gt;" accesskey="n" rel="next">Next <span class="fa 
fa-arrow-circle-right"></span></a>
+        <a href="../googlenet/README.html" class="btn btn-neutral float-right" 
title="name: GoogleNet on ImageNet SINGA version: 1.0.1 SINGA commit: 
8c990f7da2de220e8a012c6a8ecc897dc7532744 parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz 
parameter_sha1: 0a88e8948b1abca3badfd8d090d6be03f8d7655d license: unrestricted 
https://github.com/BVLC/caffe/tree/master/models/bvlc_googlenet"; accesskey="n" 
rel="next">Next <span class="fa fa-arrow-circle-right"></span></a>
       
       
-        <a href="../alexnet/README.html" class="btn btn-neutral" title="Train 
AlexNet over ImageNet" accesskey="p" rel="prev"><span class="fa 
fa-arrow-circle-left"></span> Previous</a>
+        <a href="../alexnet/README.html" class="btn btn-neutral float-left" 
title="Train AlexNet over ImageNet" accesskey="p" rel="prev"><span class="fa 
fa-arrow-circle-left"></span> Previous</a>
       
     </div>
   
@@ -318,7 +341,7 @@ to SINGA for image classification.</p>
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
 
     </p>
   </div>
@@ -335,36 +358,17 @@ to SINGA for image classification.</p>
   
 
 
-  
-
-    <script type="text/javascript">
-        var DOCUMENTATION_OPTIONS = {
-            URL_ROOT:'../../../../',
-            VERSION:'1.1.0',
-            LANGUAGE:'None',
-            COLLAPSE_INDEX:false,
-            FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true,
-            SOURCELINK_SUFFIX: '.txt'
-        };
-    </script>
-      <script type="text/javascript" 
src="../../../../_static/jquery.js"></script>
-      <script type="text/javascript" 
src="../../../../_static/underscore.js"></script>
-      <script type="text/javascript" 
src="../../../../_static/doctools.js"></script>
-
-  
-
-  
-  
-    <script type="text/javascript" 
src="../../../../_static/js/theme.js"></script>
-  
-
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
+  
+  
+    
+  
+
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

