Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/examples/cifar10/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/cifar10/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff
==============================================================================
--- incubator/singa/site/trunk/en/docs/model_zoo/examples/cifar10/README.html 
(original)
+++ incubator/singa/site/trunk/en/docs/model_zoo/examples/cifar10/README.html 
Sat Jun 29 14:42:24 2019
@@ -18,21 +18,15 @@
   
 
   
-  <script type="text/javascript" 
src="../../../../_static/js/modernizr.min.js"></script>
+
+  
   
     
-      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../" 
src="../../../../_static/documentation_options.js"></script>
-        <script type="text/javascript" 
src="../../../../_static/jquery.js"></script>
-        <script type="text/javascript" 
src="../../../../_static/underscore.js"></script>
-        <script type="text/javascript" 
src="../../../../_static/doctools.js"></script>
-        <script type="text/javascript" 
src="../../../../_static/language_data.js"></script>
-    
-    <script type="text/javascript" 
src="../../../../_static/js/theme.js"></script>
 
-    
+  
 
   
-  <link rel="stylesheet" href="../../../../_static/css/theme.css" 
type="text/css" />
+    <link rel="stylesheet" href="../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../search.html" />
@@ -48,16 +42,21 @@
        }
     </style>
 
+
+  
+  <script src="../../../../_static/js/modernizr.min.js"></script>
+
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
+
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search" >
+        <div class="wy-side-nav-search">
           
 
           
@@ -186,38 +185,39 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
+    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
 
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
---><div class="section" id="train-cnn-over-cifar-10">
-<h1>Train CNN over Cifar-10<a class="headerlink" 
href="#train-cnn-over-cifar-10" title="Permalink to this headline">¶</a></h1>
+<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
+<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
+<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
+<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
+<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
+<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
+</pre></div>
+</div>
+<p>–&gt;</p>
+<div class="section" id="train-cnn-over-cifar-10">
+<span id="train-cnn-over-cifar-10"></span><h1>Train CNN over Cifar-10<a 
class="headerlink" href="#train-cnn-over-cifar-10" title="Permalink to this 
headline">¶</a></h1>
 <p>Convolutional neural network (CNN) is a type of feed-forward artificial neural
 network widely used for image and video classification. In this example, we
 will train the following deep CNN models to do image classification on the CIFAR-10
dataset:</p>
 <ol class="simple">
-<li><p><a class="reference external" 
href="https://code.google.com/p/cuda-convnet/source/browse/trunk/example-layers/layers-18pct.cfg";>AlexNet</a>
-the best validation accuracy (without data augmentation) we achieved was about 
82%.</p></li>
-<li><p><a class="reference external" 
href="http://torch.ch/blog/2015/07/30/cifar.html";>VGGNet</a>, the best 
validation accuracy (without data augmentation) we achieved was about 
89%.</p></li>
-<li><p><a class="reference external" 
href="https://github.com/facebook/fb.resnet.torch";>ResNet</a>, the best 
validation accuracy (without data augmentation) we achieved was about 
83%.</p></li>
-<li><p><a class="reference external" 
href="https://github.com/BVLC/caffe/tree/master/examples/cifar10";>Alexnet from 
Caffe</a>, SINGA is able to convert model from Caffe seamlessly.</p></li>
+<li><a class="reference external" 
href="https://code.google.com/p/cuda-convnet/source/browse/trunk/example-layers/layers-18pct.cfg";>AlexNet</a>
+the best validation accuracy (without data augmentation) we achieved was about 
82%.</li>
+<li><a class="reference external" 
href="http://torch.ch/blog/2015/07/30/cifar.html";>VGGNet</a>, the best 
validation accuracy (without data augmentation) we achieved was about 89%.</li>
+<li><a class="reference external" 
href="https://github.com/facebook/fb.resnet.torch";>ResNet</a>, the best 
validation accuracy (without data augmentation) we achieved was about 83%.</li>
+<li><a class="reference external" 
href="https://github.com/BVLC/caffe/tree/master/examples/cifar10";>Alexnet from 
Caffe</a>, SINGA is able to convert model from Caffe seamlessly.</li>
 </ol>
 <div class="section" id="instructions">
-<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to 
this headline">¶</a></h2>
+<span id="instructions"></span><h2>Instructions<a class="headerlink" 
href="#instructions" title="Permalink to this headline">¶</a></h2>
 <div class="section" id="singa-installation">
-<h3>SINGA installation<a class="headerlink" href="#singa-installation" 
title="Permalink to this headline">¶</a></h3>
+<span id="singa-installation"></span><h3>SINGA installation<a 
class="headerlink" href="#singa-installation" title="Permalink to this 
headline">¶</a></h3>
 <p>Users can compile and install SINGA from source or install the Python 
version.
 The code can run on both CPU and GPU. For GPU training, CUDA and CUDNN (V4 or
V5)
 are required. Please refer to the installation page for detailed 
instructions.</p>
 </div>
 <div class="section" id="data-preparation">
-<h3>Data preparation<a class="headerlink" href="#data-preparation" 
title="Permalink to this headline">¶</a></h3>
+<span id="data-preparation"></span><h3>Data preparation<a class="headerlink" 
href="#data-preparation" title="Permalink to this headline">¶</a></h3>
 <p>The binary Cifar-10 dataset can be downloaded by running</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span><span class="n">python</span> <span 
class="n">download_data</span><span class="o">.</span><span class="n">py</span> 
<span class="nb">bin</span>
 </pre></div>
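For readers who want a quick look at what the downloaded binary files contain, here is a minimal sketch (not part of the SINGA example; the file path is an assumption based on the standard CIFAR-10 binary archive layout) that parses one batch file, where each record is one label byte followed by 3072 pixel bytes (the 32x32 red, green, and blue planes):

    import numpy as np

    def load_cifar10_bin(path):
        """Parse one CIFAR-10 binary batch file into labels and images."""
        raw = np.fromfile(path, dtype=np.uint8)
        # Each record: 1 label byte + 3 * 32 * 32 pixel bytes.
        records = raw.reshape(-1, 1 + 3 * 32 * 32)
        labels = records[:, 0].astype(np.int64)
        images = records[:, 1:].reshape(-1, 3, 32, 32)  # channel, height, width
        return labels, images

    # Assumed path; the binary archive normally extracts to 'cifar-10-batches-bin/'.
    labels, images = load_cifar10_bin("cifar-10-batches-bin/data_batch_1.bin")
    print(labels.shape, images.shape)  # (10000,) (10000, 3, 32, 32)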
@@ -228,10 +228,10 @@ are required. Please refer to the instal
 </div>
 </div>
 <div class="section" id="training">
-<h3>Training<a class="headerlink" href="#training" title="Permalink to this 
headline">¶</a></h3>
+<span id="training"></span><h3>Training<a class="headerlink" href="#training" 
title="Permalink to this headline">¶</a></h3>
 <p>There are four training programs</p>
 <ol>
-<li><p>train.py. The following command would train the VGG model using the 
python
+<li><p class="first">train.py. The following command would train the VGG model 
using the python
 version of the Cifar-10 dataset in ‘cifar-10-batches-py’ folder.</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span> <span class="n">python</span> <span 
class="n">train</span><span class="o">.</span><span class="n">py</span> <span 
class="n">vgg</span> <span class="n">cifar</span><span class="o">-</span><span 
class="mi">10</span><span class="o">-</span><span class="n">batches</span><span 
class="o">-</span><span class="n">py</span>
 </pre></div>
@@ -244,23 +244,24 @@ argument</p>
 </pre></div>
 </div>
 </li>
-<li><p>alexnet.cc. It trains the AlexNet model using the CPP APIs on a 
CudaGPU,</p>
+<li><p class="first">alexnet.cc. It trains the AlexNet model using the CPP 
APIs on a CudaGPU,</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span> <span class="o">./</span><span 
class="n">run</span><span class="o">.</span><span class="n">sh</span>
 </pre></div>
 </div>
 </li>
-<li><p>alexnet-parallel.cc. It trains the AlexNet model using the CPP APIs on 
two CudaGPU devices.
+<li><p class="first">alexnet-parallel.cc. It trains the AlexNet model using 
the CPP APIs on two CudaGPU devices.
The two devices run synchronously to compute the gradients of the model
parameters, which are
averaged on the host CPU device and then applied to update the parameters
(see the sketch after this list).</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span> <span class="o">./</span><span 
class="n">run</span><span class="o">-</span><span 
class="n">parallel</span><span class="o">.</span><span class="n">sh</span>
 </pre></div>
 </div>
 </li>
-<li><p>vgg-parallel.cc. It trains the VGG model using the CPP APIs on two 
CudaGPU devices similar to alexnet-parallel.cc.</p></li>
+<li><p class="first">vgg-parallel.cc. It trains the VGG model using the CPP 
APIs on two CudaGPU devices similar to alexnet-parallel.cc.</p>
+</li>
 </ol>
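As referenced in the alexnet-parallel.cc item above, the synchronous scheme it describes can be summarised in a few lines of plain numpy (an illustration only, not the SINGA C++ code): each device computes gradients on its own mini-batch, the host averages them, and the averaged gradient updates the shared parameters.

    import numpy as np

    def sync_sgd_step(params, grads_dev0, grads_dev1, lr=0.01):
        """Average the two devices' gradients on the host and apply one SGD update."""
        updated = []
        for p, g0, g1 in zip(params, grads_dev0, grads_dev1):
            g = (g0 + g1) / 2.0          # average gradients from the two devices
            updated.append(p - lr * g)   # update the shared parameters
        return updated

    # Toy example with a single 3x3 weight matrix.
    w = [np.zeros((3, 3))]
    w = sync_sgd_step(w, [np.ones((3, 3))], [3 * np.ones((3, 3))], lr=0.1)
    print(w[0][0, 0])  # -0.2 = -0.1 * (1 + 3) / 2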
 </div>
 <div class="section" id="prediction">
-<h3>Prediction<a class="headerlink" href="#prediction" title="Permalink to 
this headline">¶</a></h3>
+<span id="prediction"></span><h3>Prediction<a class="headerlink" 
href="#prediction" title="Permalink to this headline">¶</a></h3>
 <p>predict.py includes the prediction function</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>    <span class="k">def</span> <span 
class="nf">predict</span><span class="p">(</span><span 
class="n">net</span><span class="p">,</span> <span class="n">images</span><span 
class="p">,</span> <span class="n">dev</span><span class="p">,</span> <span 
class="n">topk</span><span class="o">=</span><span class="mi">5</span><span 
class="p">)</span>
 </pre></div>
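The topk argument selects the k most probable classes for each image. As a generic illustration of what that means (this is not the code inside predict.py), top-k selection over a probability vector can be done with numpy as follows:

    import numpy as np

    def topk_predictions(probs, k=5):
        """Return (class index, probability) pairs for the k highest-scoring classes."""
        idx = np.argsort(probs)[::-1][:k]  # indices sorted by descending probability
        return [(int(i), float(probs[i])) for i in idx]

    # Toy probability vector over the 10 CIFAR-10 classes.
    probs = np.array([0.02, 0.05, 0.40, 0.01, 0.10, 0.03, 0.20, 0.09, 0.06, 0.04])
    for cls, p in topk_predictions(probs, k=5):
        print(cls, p)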
@@ -288,7 +289,7 @@ The ‘model.bin’ file generated
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
 
     </p>
   </div>
@@ -305,17 +306,36 @@ The ‘model.bin’ file generated
   
 
 
+  
+
+    <script type="text/javascript">
+        var DOCUMENTATION_OPTIONS = {
+            URL_ROOT:'../../../../',
+            VERSION:'1.1.0',
+            LANGUAGE:'None',
+            COLLAPSE_INDEX:false,
+            FILE_SUFFIX:'.html',
+            HAS_SOURCE:  true,
+            SOURCELINK_SUFFIX: '.txt'
+        };
+    </script>
+      <script type="text/javascript" 
src="../../../../_static/jquery.js"></script>
+      <script type="text/javascript" 
src="../../../../_static/underscore.js"></script>
+      <script type="text/javascript" 
src="../../../../_static/doctools.js"></script>
+
+  
+
+  
+  
+    <script type="text/javascript" 
src="../../../../_static/js/theme.js"></script>
+  
+
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
-  
-  
-    
-  
-
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/alexnet/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/alexnet/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff
==============================================================================
--- 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/alexnet/README.html
 (original)
+++ 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/alexnet/README.html
 Sat Jun 29 14:42:24 2019
@@ -18,21 +18,15 @@
   
 
   
-  <script type="text/javascript" 
src="../../../../../_static/js/modernizr.min.js"></script>
+
+  
   
     
-      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../../" 
src="../../../../../_static/documentation_options.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/language_data.js"></script>
-    
-    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
 
-    
+  
 
   
-  <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
+    <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../../search.html" />
@@ -48,16 +42,21 @@
        }
     </style>
 
+
+  
+  <script src="../../../../../_static/js/modernizr.min.js"></script>
+
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
+
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search" >
+        <div class="wy-side-nav-search">
           
 
           
@@ -186,44 +185,45 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
+    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
 
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
---><div class="section" id="train-alexnet-over-imagenet">
-<h1>Train AlexNet over ImageNet<a class="headerlink" 
href="#train-alexnet-over-imagenet" title="Permalink to this 
headline">¶</a></h1>
+<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
+<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
+<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
+<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
+<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
+<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
+</pre></div>
+</div>
+<p>–&gt;</p>
+<div class="section" id="train-alexnet-over-imagenet">
+<span id="train-alexnet-over-imagenet"></span><h1>Train AlexNet over 
ImageNet<a class="headerlink" href="#train-alexnet-over-imagenet" 
title="Permalink to this headline">¶</a></h1>
 <p>Convolutional neural network (CNN) is a type of feed-forward neural
 network widely used for image and video classification. In this example, we 
will
 use a <a class="reference external" 
href="http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks";>deep
 CNN model</a>
 to do image classification on the ImageNet dataset.</p>
 <div class="section" id="instructions">
-<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to 
this headline">¶</a></h2>
+<span id="instructions"></span><h2>Instructions<a class="headerlink" 
href="#instructions" title="Permalink to this headline">¶</a></h2>
 <div class="section" id="compile-singa">
-<h3>Compile SINGA<a class="headerlink" href="#compile-singa" title="Permalink 
to this headline">¶</a></h3>
+<span id="compile-singa"></span><h3>Compile SINGA<a class="headerlink" 
href="#compile-singa" title="Permalink to this headline">¶</a></h3>
 <p>Please compile SINGA with CUDA, CUDNN and OpenCV. You can manually turn on 
the
 options in CMakeLists.txt or run <code class="docutils literal 
notranslate"><span class="pre">ccmake</span> <span class="pre">..</span></code> 
in build/ folder.</p>
 <p>We have tested CUDNN V4 and V5 (V5 requires CUDA 7.5)</p>
 </div>
 <div class="section" id="data-download">
-<h3>Data download<a class="headerlink" href="#data-download" title="Permalink 
to this headline">¶</a></h3>
+<span id="data-download"></span><h3>Data download<a class="headerlink" 
href="#data-download" title="Permalink to this headline">¶</a></h3>
 <ul class="simple">
-<li><p>Please refer to step1-3 on <a class="reference external" 
href="https://github.com/amd/OpenCL-caffe/wiki/Instructions-to-create-ImageNet-2012-data";>Instructions
 to create ImageNet 2012 data</a>
-to download and decompress the data.</p></li>
-<li><p>You can download the training and validation list by
+<li>Please refer to step1-3 on <a class="reference external" 
href="https://github.com/amd/OpenCL-caffe/wiki/Instructions-to-create-ImageNet-2012-data";>Instructions
 to create ImageNet 2012 data</a>
+to download and decompress the data.</li>
+<li>You can download the training and validation list by
 <a class="reference external" 
href="https://github.com/BVLC/caffe/blob/master/data/ilsvrc12/get_ilsvrc_aux.sh";>get_ilsvrc_aux.sh</a>
-or from <a class="reference external" 
href="http://www.image-net.org/download-images";>Imagenet</a>.</p></li>
+or from <a class="reference external" 
href="http://www.image-net.org/download-images";>Imagenet</a>.</li>
 </ul>
 </div>
 <div class="section" id="data-preprocessing">
-<h3>Data preprocessing<a class="headerlink" href="#data-preprocessing" 
title="Permalink to this headline">¶</a></h3>
+<span id="data-preprocessing"></span><h3>Data preprocessing<a 
class="headerlink" href="#data-preprocessing" title="Permalink to this 
headline">¶</a></h3>
 <ul>
-<li><p>Assuming you have downloaded the data and the list.
+<li><p class="first">Assuming you have downloaded the data and the list.
 Now we should transform the data into binary files. You can run:</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>    <span class="n">sh</span> <span 
class="n">create_data</span><span class="o">.</span><span class="n">sh</span>
 </pre></div>
@@ -231,41 +231,41 @@ Now we should transform the data into bi
 <p>The script will generate a test file(<code class="docutils literal 
notranslate"><span class="pre">test.bin</span></code>), a mean file(<code 
class="docutils literal notranslate"><span class="pre">mean.bin</span></code>) 
and
 several training files(<code class="docutils literal notranslate"><span 
class="pre">trainX.bin</span></code>) in the specified output folder.</p>
 </li>
-<li><p>You can also change the parameters in <code class="docutils literal 
notranslate"><span class="pre">create_data.sh</span></code>.</p>
+<li><p class="first">You can also change the parameters in <code 
class="docutils literal notranslate"><span 
class="pre">create_data.sh</span></code>.</p>
 <ul class="simple">
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-trainlist</span> <span class="pre">&lt;file&gt;</span></code>: the 
file of training list;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-trainfolder</span> <span class="pre">&lt;folder&gt;</span></code>: 
the folder of training images;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-testlist</span> <span class="pre">&lt;file&gt;</span></code>: the 
file of test list;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-testfolder</span> <span class="pre">&lt;floder&gt;</span></code>: 
the folder of test images;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-outdata</span> <span class="pre">&lt;folder&gt;</span></code>: the 
folder to save output files, including mean, training and test files.
-The script will generate these files in the specified folder;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-filesize</span> <span class="pre">&lt;int&gt;</span></code>: 
number of training images that stores in each binary file.</p></li>
+<li><code class="docutils literal notranslate"><span 
class="pre">-trainlist</span> <span class="pre">&lt;file&gt;</span></code>: the 
file of training list;</li>
+<li><code class="docutils literal notranslate"><span 
class="pre">-trainfolder</span> <span class="pre">&lt;folder&gt;</span></code>: 
the folder of training images;</li>
+<li><code class="docutils literal notranslate"><span 
class="pre">-testlist</span> <span class="pre">&lt;file&gt;</span></code>: the 
file of test list;</li>
+<li><code class="docutils literal notranslate"><span 
class="pre">-testfolder</span> <span class="pre">&lt;floder&gt;</span></code>: 
the folder of test images;</li>
+<li><code class="docutils literal notranslate"><span 
class="pre">-outdata</span> <span class="pre">&lt;folder&gt;</span></code>: the 
folder to save output files, including mean, training and test files.
+The script will generate these files in the specified folder;</li>
+<li><code class="docutils literal notranslate"><span 
class="pre">-filesize</span> <span class="pre">&lt;int&gt;</span></code>: 
number of training images stored in each binary file.</li>
 </ul>
 </li>
 </ul>
 </div>
 <div class="section" id="training">
-<h3>Training<a class="headerlink" href="#training" title="Permalink to this 
headline">¶</a></h3>
+<span id="training"></span><h3>Training<a class="headerlink" href="#training" 
title="Permalink to this headline">¶</a></h3>
 <ul>
-<li><p>After preparing data, you can run the following command to train the 
Alexnet model.</p>
+<li><p class="first">After preparing data, you can run the following command 
to train the Alexnet model.</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>    <span class="n">sh</span> <span 
class="n">run</span><span class="o">.</span><span class="n">sh</span>
 </pre></div>
 </div>
 </li>
-<li><p>You may change the parameters in <code class="docutils literal 
notranslate"><span class="pre">run.sh</span></code>.</p>
+<li><p class="first">You may change the parameters in <code class="docutils 
literal notranslate"><span class="pre">run.sh</span></code>.</p>
 <ul class="simple">
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-epoch</span> <span class="pre">&lt;int&gt;</span></code>: number 
of epoch to be trained, default is 90;</p></li>
-<li><p><code class="docutils literal notranslate"><span class="pre">-lr</span> 
<span class="pre">&lt;float&gt;</span></code>: base learning rate, the learning 
rate will decrease each 20 epochs,
-more specifically, <code class="docutils literal notranslate"><span 
class="pre">lr</span> <span class="pre">=</span> <span class="pre">lr</span> 
<span class="pre">*</span> <span class="pre">exp(0.1</span> <span 
class="pre">*</span> <span class="pre">(epoch</span> <span class="pre">/</span> 
<span class="pre">20))</span></code>;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-batchsize</span> <span class="pre">&lt;int&gt;</span></code>: 
batchsize, it should be changed regarding to your memory;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-filesize</span> <span class="pre">&lt;int&gt;</span></code>: 
number of training images that stores in each binary file, it is the
-same as the <code class="docutils literal notranslate"><span 
class="pre">filesize</span></code> in data preprocessing;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-ntrain</span> <span class="pre">&lt;int&gt;</span></code>: number 
of training images;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-ntest</span> <span class="pre">&lt;int&gt;</span></code>: number 
of test images;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-data</span> <span class="pre">&lt;folder&gt;</span></code>: the 
folder which stores the binary files, it is exactly the output
-folder in data preprocessing step;</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-pfreq</span> <span class="pre">&lt;int&gt;</span></code>: the 
frequency(in batch) of printing current model status(loss and 
accuracy);</p></li>
-<li><p><code class="docutils literal notranslate"><span 
class="pre">-nthreads</span> <span class="pre">&lt;int&gt;</span></code>: the 
number of threads to load data which feed to the model.</p></li>
+<li><code class="docutils literal notranslate"><span class="pre">-epoch</span> 
<span class="pre">&lt;int&gt;</span></code>: number of epoch to be trained, 
default is 90;</li>
+<li><code class="docutils literal notranslate"><span class="pre">-lr</span> 
<span class="pre">&lt;float&gt;</span></code>: base learning rate, the learning 
rate decreases every 20 epochs,
+more specifically, <code class="docutils literal notranslate"><span 
class="pre">lr</span> <span class="pre">=</span> <span class="pre">lr</span> 
<span class="pre">*</span> <span class="pre">exp(0.1</span> <span 
class="pre">*</span> <span class="pre">(epoch</span> <span class="pre">/</span> 
<span class="pre">20))</span></code>;</li>
+<li><code class="docutils literal notranslate"><span 
class="pre">-batchsize</span> <span class="pre">&lt;int&gt;</span></code>: 
batch size; it should be adjusted according to the available memory;</li>
+<li><code class="docutils literal notranslate"><span 
class="pre">-filesize</span> <span class="pre">&lt;int&gt;</span></code>: 
number of training images stored in each binary file; it is the
+same as the <code class="docutils literal notranslate"><span 
class="pre">filesize</span></code> in data preprocessing;</li>
+<li><code class="docutils literal notranslate"><span 
class="pre">-ntrain</span> <span class="pre">&lt;int&gt;</span></code>: number 
of training images;</li>
+<li><code class="docutils literal notranslate"><span class="pre">-ntest</span> 
<span class="pre">&lt;int&gt;</span></code>: number of test images;</li>
+<li><code class="docutils literal notranslate"><span class="pre">-data</span> 
<span class="pre">&lt;folder&gt;</span></code>: the folder which stores the 
binary files; it is exactly the output
+folder from the data preprocessing step;</li>
+<li><code class="docutils literal notranslate"><span class="pre">-pfreq</span> 
<span class="pre">&lt;int&gt;</span></code>: the frequency(in batch) of 
printing current model status(loss and accuracy);</li>
+<li><code class="docutils literal notranslate"><span 
class="pre">-nthreads</span> <span class="pre">&lt;int&gt;</span></code>: the 
number of threads used to load the data that is fed to the model.</li>
 </ul>
 </li>
 </ul>
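The same flags can also be passed from a small driver script instead of editing run.sh by hand. The sketch below simply shells out to run.sh with the flags listed above; all of the values are placeholders chosen for illustration (they are not recommendations from this guide), and it assumes it is run from the example folder after data preprocessing.

    import subprocess

    # Flag names come from the list above; the values are hypothetical placeholders.
    cmd = [
        "sh", "run.sh",
        "-epoch", "90",
        "-lr", "0.01",
        "-batchsize", "256",
        "-filesize", "1280",
        "-ntrain", "1281167",
        "-ntest", "50000",
        "-data", "imagenet_data",  # the output folder from the preprocessing step
        "-pfreq", "100",
        "-nthreads", "12",
    ]
    subprocess.run(cmd, check=True)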
@@ -284,7 +284,7 @@ folder in data preprocessing step;</p></
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
 
     </p>
   </div>
@@ -301,17 +301,36 @@ folder in data preprocessing step;</p></
   
 
 
+  
+
+    <script type="text/javascript">
+        var DOCUMENTATION_OPTIONS = {
+            URL_ROOT:'../../../../../',
+            VERSION:'1.1.0',
+            LANGUAGE:'None',
+            COLLAPSE_INDEX:false,
+            FILE_SUFFIX:'.html',
+            HAS_SOURCE:  true,
+            SOURCELINK_SUFFIX: '.txt'
+        };
+    </script>
+      <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
+      <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
+      <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
+
+  
+
+  
+  
+    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
+  
+
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
-  
-  
-    
-  
-
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/densenet/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/densenet/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff
==============================================================================
--- 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/densenet/README.html
 (original)
+++ 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/densenet/README.html
 Sat Jun 29 14:42:24 2019
@@ -9,7 +9,7 @@
   
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
   
-  <title>Image Classification using DenseNet &mdash; incubator-singa 1.1.0 
documentation</title>
+  <title>–&gt; &mdash; incubator-singa 1.1.0 documentation</title>
   
 
   
@@ -18,21 +18,15 @@
   
 
   
-  <script type="text/javascript" 
src="../../../../../_static/js/modernizr.min.js"></script>
+
+  
   
     
-      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../../" 
src="../../../../../_static/documentation_options.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/language_data.js"></script>
-    
-    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
 
-    
+  
 
   
-  <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
+    <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../../search.html" />
@@ -48,16 +42,21 @@
        }
     </style>
 
+
+  
+  <script src="../../../../../_static/js/modernizr.min.js"></script>
+
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
+
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search" >
+        <div class="wy-side-nav-search">
           
 
           
@@ -162,7 +161,7 @@
     
       <li><a href="../../../../../index.html">Docs</a> &raquo;</li>
         
-      <li>Image Classification using DenseNet</li>
+      <li>–&gt;</li>
     
     
       <li class="wy-breadcrumbs-aside">
@@ -186,36 +185,44 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
+    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
 
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
---><div class="section" id="image-classification-using-densenet">
-<h1>Image Classification using DenseNet<a class="headerlink" 
href="#image-classification-using-densenet" title="Permalink to this 
headline">¶</a></h1>
+<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
+<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
+<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
+<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
+<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
+<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
+</pre></div>
+</div>
+<div class="section" id="">
+<span id="id1"></span><h1>–&gt;<a class="headerlink" href="#" 
title="Permalink to this headline">¶</a></h1>
+<p>name: DenseNet models on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+license: 
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py</p>
+</div>
+<hr class="docutils" />
+<div class="section" id="image-classification-using-densenet">
+<span id="image-classification-using-densenet"></span><h1>Image Classification 
using DenseNet<a class="headerlink" href="#image-classification-using-densenet" 
title="Permalink to this headline">¶</a></h1>
 <p>In this example, we convert DenseNet on <a class="reference external" 
href="https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py";>PyTorch</a>
 to SINGA for image classification.</p>
 <div class="section" id="instructions">
-<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to 
this headline">¶</a></h2>
+<span id="instructions"></span><h2>Instructions<a class="headerlink" 
href="#instructions" title="Permalink to this headline">¶</a></h2>
 <ul>
-<li><p>Download one parameter checkpoint file (see below) and the synset word 
file of ImageNet into this folder, e.g.,</p>
+<li><p class="first">Download one parameter checkpoint file (see below) and 
the synset word file of ImageNet into this folder, e.g.,</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ wget 
https://s3-ap-southeast-1.amazonaws.com/dlfile/densenet/densenet-121.tar.gz
   $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/synset_words.txt
   $ tar xvf densenet-121.tar.gz
 </pre></div>
 </div>
 </li>
-<li><p>Usage</p>
+<li><p class="first">Usage</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ python serve.py -h
 </pre></div>
 </div>
 </li>
-<li><p>Example</p>
+<li><p class="first">Example</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  # use cpu
   $ python serve.py --use_cpu --parameter_file densenet-121.pickle --depth 121 
&amp;
   # use gpu
@@ -225,7 +232,7 @@ to SINGA for image classification.</p>
 <p>The parameter files for the following model and depth configuration pairs 
are provided:
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/densenet/densenet-121.tar.gz";>121</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/densenet/densenet-169.tar.gz";>169</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/densenet/densenet-201.tar.gz";>201</a>,
 <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/densenet/densenet-161.tar.gz";>161</a></p>
 </li>
-<li><p>Submit images for classification</p>
+<li><p class="first">Submit images for classification</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ curl -i -F [email protected] 
http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
@@ -236,7 +243,7 @@ to SINGA for image classification.</p>
 <p>image1.jpg, image2.jpg and image3.jpg should be downloaded before executing 
the above commands.</p>
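The same multipart upload that the curl commands perform can also be issued from Python. This is a hedged sketch that assumes the third-party requests package is installed, that serve.py is already running locally on port 9999 as shown above, and that the form field is named image as the -F image=@... option suggests.

    import requests

    url = "http://localhost:9999/api"
    for name in ("image1.jpg", "image2.jpg", "image3.jpg"):
        with open(name, "rb") as f:
            # 'image' mirrors the -F image=@<file> form field used by curl.
            resp = requests.post(url, files={"image": f})
        print(name, resp.status_code, resp.text)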
 </div>
 <div class="section" id="details">
-<h2>Details<a class="headerlink" href="#details" title="Permalink to this 
headline">¶</a></h2>
+<span id="details"></span><h2>Details<a class="headerlink" href="#details" 
title="Permalink to this headline">¶</a></h2>
 <p>The parameter files were converted from PyTorch via the convert.py 
program.</p>
 <p>Usage:</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>$ python convert.py -h
@@ -256,7 +263,7 @@ to SINGA for image classification.</p>
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
 
     </p>
   </div>
@@ -273,17 +280,36 @@ to SINGA for image classification.</p>
   
 
 
+  
+
+    <script type="text/javascript">
+        var DOCUMENTATION_OPTIONS = {
+            URL_ROOT:'../../../../../',
+            VERSION:'1.1.0',
+            LANGUAGE:'None',
+            COLLAPSE_INDEX:false,
+            FILE_SUFFIX:'.html',
+            HAS_SOURCE:  true,
+            SOURCELINK_SUFFIX: '.txt'
+        };
+    </script>
+      <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
+      <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
+      <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
+
+  
+
+  
+  
+    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
+  
+
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
-  
-  
-    
-  
-
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/googlenet/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/googlenet/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff
==============================================================================
--- 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/googlenet/README.html
 (original)
+++ 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/googlenet/README.html
 Sat Jun 29 14:42:24 2019
@@ -9,7 +9,7 @@
   
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
   
-  <title>Image Classification using GoogleNet &mdash; incubator-singa 1.1.0 
documentation</title>
+  <title>–&gt; &mdash; incubator-singa 1.1.0 documentation</title>
   
 
   
@@ -18,21 +18,15 @@
   
 
   
-  <script type="text/javascript" 
src="../../../../../_static/js/modernizr.min.js"></script>
+
+  
   
     
-      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../../" 
src="../../../../../_static/documentation_options.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/language_data.js"></script>
-    
-    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
 
-    
+  
 
   
-  <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
+    <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../../search.html" />
@@ -48,16 +42,21 @@
        }
     </style>
 
+
+  
+  <script src="../../../../../_static/js/modernizr.min.js"></script>
+
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
+
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search" >
+        <div class="wy-side-nav-search">
           
 
           
@@ -162,7 +161,7 @@
     
       <li><a href="../../../../../index.html">Docs</a> &raquo;</li>
         
-      <li>Image Classification using GoogleNet</li>
+      <li>–&gt;</li>
     
     
       <li class="wy-breadcrumbs-aside">
@@ -186,29 +185,39 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
+    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
 
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
---><div class="section" id="image-classification-using-googlenet">
-<h1>Image Classification using GoogleNet<a class="headerlink" 
href="#image-classification-using-googlenet" title="Permalink to this 
headline">¶</a></h1>
-<p>In this example, we convert GoogleNet trained on Caffe to SINGA for image 
classification. Tested on <a class="reference external" 
href="8c990f7da2de220e8a012c6a8ecc897dc7532744">SINGA commit</a> with <a 
class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz";>the 
parameters</a>.</p>
+<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
+<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
+<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
+<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
+<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
+<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
+</pre></div>
+</div>
+<div class="section" id="">
+<span id="id1"></span><h1>–&gt;<a class="headerlink" href="#" 
title="Permalink to this headline">¶</a></h1>
+<p>name: GoogleNet on ImageNet
+SINGA version: 1.0.1
+SINGA commit: 8c990f7da2de220e8a012c6a8ecc897dc7532744
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz
+parameter_sha1: 0a88e8948b1abca3badfd8d090d6be03f8d7655d
+license: unrestricted 
https://github.com/BVLC/caffe/tree/master/models/bvlc_googlenet</p>
+</div>
+<hr class="docutils" />
+<div class="section" id="image-classification-using-googlenet">
+<span id="image-classification-using-googlenet"></span><h1>Image 
Classification using GoogleNet<a class="headerlink" 
href="#image-classification-using-googlenet" title="Permalink to this 
headline">¶</a></h1>
+<p>In this example, we convert GoogleNet trained on Caffe to SINGA for image 
classification.</p>
 <div class="section" id="instructions">
-<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to 
this headline">¶</a></h2>
+<span id="instructions"></span><h2>Instructions<a class="headerlink" 
href="#instructions" title="Permalink to this headline">¶</a></h2>
 <ul>
-<li><p>Download the parameter checkpoint file into this folder</p>
+<li><p class="first">Download the parameter checkpoint file into this 
folder</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ wget 
https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz
   $ tar xvf bvlc_googlenet.tar.gz
 </pre></div>
 </div>
 </li>
-<li><p>Run the program</p>
+<li><p class="first">Run the program</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  # use cpu
   $ python serve.py -C &amp;
   # use gpu
@@ -216,7 +225,7 @@
 </pre></div>
 </div>
 </li>
-<li><p>Submit images for classification</p>
+<li><p class="first">Submit images for classification</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ curl -i -F [email protected] 
http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
@@ -227,7 +236,7 @@
 <p>image1.jpg, image2.jpg and image3.jpg should be downloaded before executing 
the above commands.</p>
 </div>
 <div class="section" id="details">
-<h2>Details<a class="headerlink" href="#details" title="Permalink to this 
headline">¶</a></h2>
+<span id="details"></span><h2>Details<a class="headerlink" href="#details" 
title="Permalink to this headline">¶</a></h2>
 <p>We first extract the parameter values from <a class="reference external" 
href="http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel";>Caffe’s 
checkpoint file</a> into a pickle version.
After downloading the checkpoint file into the <code class="docutils literal 
notranslate"><span class="pre">caffe_root/python</span></code> folder, run the 
following script</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span><span class="c1"># to be executed within 
caffe_root/python folder</span>
@@ -270,7 +279,7 @@ Refer to <a class="reference external" h
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
 
     </p>
   </div>
@@ -287,17 +296,36 @@ Refer to <a class="reference external" h
   
 
 
+  
+
+    <script type="text/javascript">
+        var DOCUMENTATION_OPTIONS = {
+            URL_ROOT:'../../../../../',
+            VERSION:'1.1.0',
+            LANGUAGE:'None',
+            COLLAPSE_INDEX:false,
+            FILE_SUFFIX:'.html',
+            HAS_SOURCE:  true,
+            SOURCELINK_SUFFIX: '.txt'
+        };
+    </script>
+      <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
+      <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
+      <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
+
+  
+
+  
+  
+    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
+  
+
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
-  
-  
-    
-  
-
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/inception/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/inception/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff
==============================================================================
--- 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/inception/README.html
 (original)
+++ 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/inception/README.html
 Sat Jun 29 14:42:24 2019
@@ -9,7 +9,7 @@
   
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
   
-  <title>Image Classification using Inception V4 &mdash; incubator-singa 1.1.0 
documentation</title>
+  <title>–&gt; &mdash; incubator-singa 1.1.0 documentation</title>
   
 
   
@@ -18,21 +18,15 @@
   
 
   
-  <script type="text/javascript" 
src="../../../../../_static/js/modernizr.min.js"></script>
+
+  
   
     
-      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../../" 
src="../../../../../_static/documentation_options.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/language_data.js"></script>
-    
-    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
 
-    
+  
 
   
-  <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
+    <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../../search.html" />
@@ -48,16 +42,21 @@
        }
     </style>
 
+
+  
+  <script src="../../../../../_static/js/modernizr.min.js"></script>
+
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
+
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search" >
+        <div class="wy-side-nav-search">
           
 
           
@@ -162,7 +161,7 @@
     
       <li><a href="../../../../../index.html">Docs</a> &raquo;</li>
         
-      <li>Image Classification using Inception V4</li>
+      <li>–&gt;</li>
     
     
       <li class="wy-breadcrumbs-aside">
@@ -186,30 +185,41 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
+    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
 
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
---><div class="section" id="image-classification-using-inception-v4">
-<h1>Image Classification using Inception V4<a class="headerlink" 
href="#image-classification-using-inception-v4" title="Permalink to this 
headline">¶</a></h1>
-<p>In this example, we convert Inception V4 trained on Tensorflow to SINGA for 
image classification. Tested on SINGA version 1.1.1 with <a class="reference 
external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/inception_v4.tar.gz";>parameters
 pretrained by tensorflow</a>.</p>
+<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
+<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
+<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
+<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
+<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
+<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
+</pre></div>
+</div>
+<div class="section" id="">
+<span id="id1"></span><h1>–&gt;<a class="headerlink" href="#" 
title="Permalink to this headline">¶</a></h1>
+<p>name: Inception V4 on ImageNet
+SINGA version: 1.1.1
+SINGA commit:
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/inception_v4.tar.gz
+parameter_sha1: 5fdd6f5d8af8fd10e7321d9b38bb87ef14e80d56
+license: https://github.com/tensorflow/models/tree/master/slim</p>
+</div>
+<hr class="docutils" />
+<div class="section" id="image-classification-using-inception-v4">
+<span id="image-classification-using-inception-v4"></span><h1>Image 
Classification using Inception V4<a class="headerlink" 
href="#image-classification-using-inception-v4" title="Permalink to this 
headline">¶</a></h1>
+<p>In this example, we convert Inception V4 trained on Tensorflow to SINGA for 
image classification.</p>
 <div class="section" id="instructions">
-<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to 
this headline">¶</a></h2>
+<span id="instructions"></span><h2>Instructions<a class="headerlink" 
href="#instructions" title="Permalink to this headline">¶</a></h2>
 <ul>
-<li><p>Download the parameter checkpoint file</p>
+<li><p class="first">Download the parameter checkpoint file</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ wget
   $ tar xvf inception_v4.tar.gz
 </pre></div>
 </div>
 </li>
-<li><p>Download <a class="reference external" 
href="https://github.com/BVLC/caffe/blob/master/data/ilsvrc12/get_ilsvrc_aux.sh";>synset_word.txt</a>
 file.</p></li>
-<li><p>Run the program</p>
+<li><p class="first">Download <a class="reference external" 
href="https://github.com/BVLC/caffe/blob/master/data/ilsvrc12/get_ilsvrc_aux.sh";>synset_word.txt</a>
 file.</p>
+</li>
+<li><p class="first">Run the program</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  # use cpu
   $ python serve.py -C &amp;
   # use gpu
@@ -217,7 +227,7 @@
 </pre></div>
 </div>
 </li>
-<li><p>Submit images for classification</p>
+<li><p class="first">Submit images for classification</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ curl -i -F [email protected] 
http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
@@ -228,7 +238,7 @@
 <p>image1.jpg, image2.jpg and image3.jpg should be downloaded before executing 
the above commands.</p>
 </div>
 <div class="section" id="details">
-<h2>Details<a class="headerlink" href="#details" title="Permalink to this 
headline">¶</a></h2>
+<span id="details"></span><h2>Details<a class="headerlink" href="#details" 
title="Permalink to this headline">¶</a></h2>
 <p>We first extract the parameter values from <a class="reference external" 
href="http://download.tensorflow.org/models/inception_v4_2016_09_09.tar.gz";>Tensorflow’s
 checkpoint file</a> into a pickle version.
 After downloading and decompressing the checkpoint file, run the following 
script</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>$ python convert.py 
--file_name=inception_v4.ckpt
@@ -248,7 +258,7 @@ After downloading and decompressing the
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
 
     </p>
   </div>
@@ -265,17 +275,36 @@ After downloading and decompressing the
   
 
 
+  
+
+    <script type="text/javascript">
+        var DOCUMENTATION_OPTIONS = {
+            URL_ROOT:'../../../../../',
+            VERSION:'1.1.0',
+            LANGUAGE:'None',
+            COLLAPSE_INDEX:false,
+            FILE_SUFFIX:'.html',
+            HAS_SOURCE:  true,
+            SOURCELINK_SUFFIX: '.txt'
+        };
+    </script>
+      <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
+      <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
+      <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
+
+  
+
+  
+  
+    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
+  
+
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
-  
-  
-    
-  
-
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

Modified: 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/resnet/README.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/resnet/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff
==============================================================================
--- 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/resnet/README.html
 (original)
+++ 
incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/resnet/README.html
 Sat Jun 29 14:42:24 2019
@@ -9,7 +9,7 @@
   
   <meta name="viewport" content="width=device-width, initial-scale=1.0">
   
-  <title>Image Classification using Residual Networks &mdash; incubator-singa 
1.1.0 documentation</title>
+  <title>–&gt; &mdash; incubator-singa 1.1.0 documentation</title>
   
 
   
@@ -18,21 +18,15 @@
   
 
   
-  <script type="text/javascript" 
src="../../../../../_static/js/modernizr.min.js"></script>
+
+  
   
     
-      <script type="text/javascript" id="documentation_options" 
data-url_root="../../../../../" 
src="../../../../../_static/documentation_options.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
-        <script type="text/javascript" 
src="../../../../../_static/language_data.js"></script>
-    
-    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
 
-    
+  
 
   
-  <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
+    <link rel="stylesheet" href="../../../../../_static/css/theme.css" 
type="text/css" />
   <link rel="stylesheet" href="../../../../../_static/pygments.css" 
type="text/css" />
     <link rel="index" title="Index" href="../../../../../genindex.html" />
     <link rel="search" title="Search" href="../../../../../search.html" />
@@ -48,16 +42,21 @@
        }
     </style>
 
+
+  
+  <script src="../../../../../_static/js/modernizr.min.js"></script>
+
 </head>
 
 <body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
+
     
     <nav data-toggle="wy-nav-shift" class="wy-nav-side">
       <div class="wy-side-scroll">
-        <div class="wy-side-nav-search" >
+        <div class="wy-side-nav-search">
           
 
           
@@ -162,7 +161,7 @@
     
       <li><a href="../../../../../index.html">Docs</a> &raquo;</li>
         
-      <li>Image Classification using Residual Networks</li>
+      <li>–&gt;</li>
     
     
       <li class="wy-breadcrumbs-aside">
@@ -186,35 +185,44 @@
     regarding copyright ownership.  The ASF licenses this file
     to you under the Apache License, Version 2.0 (the
     "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
+    with the License.  You may obtain a copy of the License at<div 
class="highlight-default notranslate"><div class="highlight"><pre><span></span> 
 <span class="n">http</span><span class="p">:</span><span 
class="o">//</span><span class="n">www</span><span class="o">.</span><span 
class="n">apache</span><span class="o">.</span><span class="n">org</span><span 
class="o">/</span><span class="n">licenses</span><span class="o">/</span><span 
class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span>
 
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
---><div class="section" id="image-classification-using-residual-networks">
-<h1>Image Classification using Residual Networks<a class="headerlink" 
href="#image-classification-using-residual-networks" title="Permalink to this 
headline">¶</a></h1>
-<p>In this example, we convert Residual Networks trained on <a 
class="reference external" 
href="https://github.com/facebook/fb.resnet.torch";>Torch</a> to SINGA for image 
classification. Tested on [SINGA commit] with the <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz";>parameters
 pretrained by Torch</a></p>
+<span class="n">Unless</span> <span class="n">required</span> <span 
class="n">by</span> <span class="n">applicable</span> <span 
class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> 
<span class="n">to</span> <span class="ow">in</span> <span 
class="n">writing</span><span class="p">,</span>
+<span class="n">software</span> <span class="n">distributed</span> <span 
class="n">under</span> <span class="n">the</span> <span 
class="n">License</span> <span class="ow">is</span> <span 
class="n">distributed</span> <span class="n">on</span> <span class="n">an</span>
+<span class="s2">&quot;AS IS&quot;</span> <span class="n">BASIS</span><span 
class="p">,</span> <span class="n">WITHOUT</span> <span 
class="n">WARRANTIES</span> <span class="n">OR</span> <span 
class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span>
+<span class="n">KIND</span><span class="p">,</span> <span 
class="n">either</span> <span class="n">express</span> <span 
class="ow">or</span> <span class="n">implied</span><span class="o">.</span>  
<span class="n">See</span> <span class="n">the</span> <span 
class="n">License</span> <span class="k">for</span> <span class="n">the</span>
+<span class="n">specific</span> <span class="n">language</span> <span 
class="n">governing</span> <span class="n">permissions</span> <span 
class="ow">and</span> <span class="n">limitations</span>
+<span class="n">under</span> <span class="n">the</span> <span 
class="n">License</span><span class="o">.</span>
+</pre></div>
+</div>
+<div class="section" id="">
+<span id="id1"></span><h1>–&gt;<a class="headerlink" href="#" 
title="Permalink to this headline">¶</a></h1>
+<p>name: Resnets on ImageNet
+SINGA version: 1.1
+SINGA commit: 45ec92d8ffc1fa1385a9307fdf07e21da939ee2f
+parameter_url: 
https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz
+license: Apache V2, 
https://github.com/facebook/fb.resnet.torch/blob/master/LICENSE</p>
+</div>
+<hr class="docutils" />
+<div class="section" id="image-classification-using-residual-networks">
+<span id="image-classification-using-residual-networks"></span><h1>Image 
Classification using Residual Networks<a class="headerlink" 
href="#image-classification-using-residual-networks" title="Permalink to this 
headline">¶</a></h1>
+<p>In this example, we convert Residual Networks trained on <a 
class="reference external" 
href="https://github.com/facebook/fb.resnet.torch";>Torch</a> to SINGA for image 
classification.</p>
 <div class="section" id="instructions">
-<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to 
this headline">¶</a></h2>
+<span id="instructions"></span><h2>Instructions<a class="headerlink" 
href="#instructions" title="Permalink to this headline">¶</a></h2>
 <ul>
-<li><p>Download one parameter checkpoint file (see below) and the synset word 
file of ImageNet into this folder, e.g.,</p>
+<li><p class="first">Download one parameter checkpoint file (see below) and 
the synset word file of ImageNet into this folder, e.g.,</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ wget 
https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz
   $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/synset_words.txt
   $ tar xvf resnet-18.tar.gz
 </pre></div>
 </div>
 </li>
-<li><p>Usage</p>
+<li><p class="first">Usage</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ python serve.py -h
 </pre></div>
 </div>
 </li>
-<li><p>Example</p>
+<li><p class="first">Example</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  # use cpu
   $ python serve.py --use_cpu --parameter_file resnet-18.pickle --model resnet 
--depth 18 &amp;
   # use gpu
@@ -223,13 +231,13 @@
 </div>
 <p>The parameter files for the following model and depth configuration pairs 
are provided:</p>
 <ul class="simple">
-<li><p>resnet (original resnet), <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz";>18</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-34.tar.gz";>34</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-101.tar.gz";>101</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-152.tar.gz";>152</a></p></li>
-<li><p>addbn (resnet with a batch normalization layer after the addition), <a 
class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-50.tar.gz";>50</a></p></li>
-<li><p>wrn (wide resnet), <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/wrn-50-2.tar.gz";>50</a></p></li>
-<li><p>preact (resnet with pre-activation) <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-200.tar.gz";>200</a></p></li>
+<li>resnet (original resnet), <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-18.tar.gz";>18</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-34.tar.gz";>34</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-101.tar.gz";>101</a>|<a
 class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-152.tar.gz";>152</a></li>
+<li>addbn (resnet with a batch normalization layer after the addition), <a 
class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-50.tar.gz";>50</a></li>
+<li>wrn (wide resnet), <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/wrn-50-2.tar.gz";>50</a></li>
+<li>preact (resnet with pre-activation) <a class="reference external" 
href="https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/resnet-200.tar.gz";>200</a></li>
 </ul>
 </li>
-<li><p>Submit images for classification</p>
+<li><p class="first">Submit images for classification</p>
 <div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span>  $ curl -i -F [email protected] 
http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
   $ curl -i -F [email protected] http://localhost:9999/api
@@ -240,7 +248,7 @@
 <p>image1.jpg, image2.jpg and image3.jpg should be downloaded before executing 
the above commands.</p>
 </div>
 <div class="section" id="details">
-<h2>Details<a class="headerlink" href="#details" title="Permalink to this 
headline">¶</a></h2>
+<span id="details"></span><h2>Details<a class="headerlink" href="#details" 
title="Permalink to this headline">¶</a></h2>
 <p>The parameter files were extracted from the original <a class="reference 
external" 
href="https://github.com/facebook/fb.resnet.torch/tree/master/pretrained";>torch 
files</a> via
 the convert.py program.</p>
 <p>Usage:</p>
@@ -261,7 +269,7 @@ the convert.py program.</p>
 
   <div role="contentinfo">
     <p>
-        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective 
owners..
 
     </p>
   </div>
@@ -278,17 +286,36 @@ the convert.py program.</p>
   
 
 
+  
+
+    <script type="text/javascript">
+        var DOCUMENTATION_OPTIONS = {
+            URL_ROOT:'../../../../../',
+            VERSION:'1.1.0',
+            LANGUAGE:'None',
+            COLLAPSE_INDEX:false,
+            FILE_SUFFIX:'.html',
+            HAS_SOURCE:  true,
+            SOURCELINK_SUFFIX: '.txt'
+        };
+    </script>
+      <script type="text/javascript" 
src="../../../../../_static/jquery.js"></script>
+      <script type="text/javascript" 
src="../../../../../_static/underscore.js"></script>
+      <script type="text/javascript" 
src="../../../../../_static/doctools.js"></script>
+
+  
+
+  
+  
+    <script type="text/javascript" 
src="../../../../../_static/js/theme.js"></script>
+  
+
   <script type="text/javascript">
       jQuery(function () {
           SphinxRtdTheme.Navigation.enable(true);
       });
   </script>
 
-  
-  
-    
-  
-
 <div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
   <span class="rst-current-version" data-toggle="rst-current-version">
     <span class="fa fa-book"> incubator-singa </span>

