Modified: incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/vgg/README.html URL: http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/vgg/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff ============================================================================== --- incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/vgg/README.html (original) +++ incubator/singa/site/trunk/en/docs/model_zoo/examples/imagenet/vgg/README.html Sat Jun 29 14:42:24 2019 @@ -9,7 +9,7 @@ <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>Image Classification using VGG — incubator-singa 1.1.0 documentation</title> + <title>â> — incubator-singa 1.1.0 documentation</title> @@ -18,21 +18,15 @@ - <script type="text/javascript" src="../../../../../_static/js/modernizr.min.js"></script> + + - <script type="text/javascript" id="documentation_options" data-url_root="../../../../../" src="../../../../../_static/documentation_options.js"></script> - <script type="text/javascript" src="../../../../../_static/jquery.js"></script> - <script type="text/javascript" src="../../../../../_static/underscore.js"></script> - <script type="text/javascript" src="../../../../../_static/doctools.js"></script> - <script type="text/javascript" src="../../../../../_static/language_data.js"></script> - - <script type="text/javascript" src="../../../../../_static/js/theme.js"></script> - + - <link rel="stylesheet" href="../../../../../_static/css/theme.css" type="text/css" /> + <link rel="stylesheet" href="../../../../../_static/css/theme.css" type="text/css" /> <link rel="stylesheet" href="../../../../../_static/pygments.css" type="text/css" /> <link rel="index" title="Index" href="../../../../../genindex.html" /> <link rel="search" title="Search" href="../../../../../search.html" /> @@ -48,16 +42,21 @@ } </style> + + + <script src="../../../../../_static/js/modernizr.min.js"></script> + </head> <body class="wy-body-for-nav"> <div class="wy-grid-for-nav"> + <nav data-toggle="wy-nav-shift" class="wy-nav-side"> <div class="wy-side-scroll"> - <div class="wy-side-nav-search" > + <div class="wy-side-nav-search"> @@ -162,7 +161,7 @@ <li><a href="../../../../../index.html">Docs</a> »</li> - <li>Image Classification using VGG</li> + <li>â></li> <li class="wy-breadcrumbs-aside"> @@ -186,36 +185,44 @@ regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at + with the License. You may obtain a copy of the License at<div class="highlight-default notranslate"><div class="highlight"><pre><span></span> <span class="n">http</span><span class="p">:</span><span class="o">//</span><span class="n">www</span><span class="o">.</span><span class="n">apache</span><span class="o">.</span><span class="n">org</span><span class="o">/</span><span class="n">licenses</span><span class="o">/</span><span class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span> - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. 
---><div class="section" id="image-classification-using-vgg"> -<h1>Image Classification using VGG<a class="headerlink" href="#image-classification-using-vgg" title="Permalink to this headline">¶</a></h1> +<span class="n">Unless</span> <span class="n">required</span> <span class="n">by</span> <span class="n">applicable</span> <span class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> <span class="n">to</span> <span class="ow">in</span> <span class="n">writing</span><span class="p">,</span> +<span class="n">software</span> <span class="n">distributed</span> <span class="n">under</span> <span class="n">the</span> <span class="n">License</span> <span class="ow">is</span> <span class="n">distributed</span> <span class="n">on</span> <span class="n">an</span> +<span class="s2">"AS IS"</span> <span class="n">BASIS</span><span class="p">,</span> <span class="n">WITHOUT</span> <span class="n">WARRANTIES</span> <span class="n">OR</span> <span class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span> +<span class="n">KIND</span><span class="p">,</span> <span class="n">either</span> <span class="n">express</span> <span class="ow">or</span> <span class="n">implied</span><span class="o">.</span> <span class="n">See</span> <span class="n">the</span> <span class="n">License</span> <span class="k">for</span> <span class="n">the</span> +<span class="n">specific</span> <span class="n">language</span> <span class="n">governing</span> <span class="n">permissions</span> <span class="ow">and</span> <span class="n">limitations</span> +<span class="n">under</span> <span class="n">the</span> <span class="n">License</span><span class="o">.</span> +</pre></div> +</div> +<div class="section" id=""> +<span id="id1"></span><h1>â><a class="headerlink" href="#" title="Permalink to this headline">¶</a></h1> +<p>name: VGG models on ImageNet +SINGA version: 1.1.1 +SINGA commit: +license: https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py</p> +</div> +<hr class="docutils" /> +<div class="section" id="image-classification-using-vgg"> +<span id="image-classification-using-vgg"></span><h1>Image Classification using VGG<a class="headerlink" href="#image-classification-using-vgg" title="Permalink to this headline">¶</a></h1> <p>In this example, we convert VGG on <a class="reference external" href="https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py">PyTorch</a> to SINGA for image classification.</p> <div class="section" id="instructions"> -<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to this headline">¶</a></h2> +<span id="instructions"></span><h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to this headline">¶</a></h2> <ul> -<li><p>Download one parameter checkpoint file (see below) and the synset word file of ImageNet into this folder, e.g.,</p> +<li><p class="first">Download one parameter checkpoint file (see below) and the synset word file of ImageNet into this folder, e.g.,</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg11.tar.gz $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/resnet/synset_words.txt $ tar xvf vgg11.tar.gz </pre></div> </div> </li> -<li><p>Usage</p> +<li><p class="first">Usage</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> $ python serve.py -h </pre></div> </div> </li> -<li><p>Example</p> +<li><p 
class="first">Example</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> # use cpu $ python serve.py --use_cpu --parameter_file vgg11.pickle --depth 11 & # use gpu @@ -224,11 +231,11 @@ to SINGA for image classification.</p> </div> <p>The parameter files for the following model and depth configuration pairs are provided:</p> <ul class="simple"> -<li><p>Without batch-normalization, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg11.tar.gz">11</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg13.tar.gz">13</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg16.tar.gz">16</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg19.tar.gz">19</a></p></li> -<li><p>With batch-normalization, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg11_bn.tar.gz">11</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg13_bn.tar.gz">13</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg16_bn.tar.gz">16</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg19_bn.tar.gz">19</a></p></li> +<li>Without batch-normalization, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg11.tar.gz">11</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg13.tar.gz">13</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg16.tar.gz">16</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg19.tar.gz">19</a></li> +<li>With batch-normalization, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg11_bn.tar.gz">11</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg13_bn.tar.gz">13</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg16_bn.tar.gz">16</a>, <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/vgg/vgg19_bn.tar.gz">19</a></li> </ul> </li> -<li><p>Submit images for classification</p> +<li><p class="first">Submit images for classification</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> $ curl -i -F [email protected] http://localhost:9999/api $ curl -i -F [email protected] http://localhost:9999/api $ curl -i -F [email protected] http://localhost:9999/api @@ -239,7 +246,7 @@ to SINGA for image classification.</p> <p>image1.jpg, image2.jpg and image3.jpg should be downloaded before executing the above commands.</p> </div> <div class="section" id="details"> -<h2>Details<a class="headerlink" href="#details" title="Permalink to this headline">¶</a></h2> +<span id="details"></span><h2>Details<a class="headerlink" href="#details" title="Permalink to this headline">¶</a></h2> <p>The parameter files were converted from the pytorch via the convert.py program.</p> <p>Usage:</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ python convert.py -h @@ -259,7 +266,7 @@ to SINGA for image classification.</p> <div role="contentinfo"> <p> - © Copyright 2019 The Apache Software Foundation. All rights reserved. 
Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners. + © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners.. </p> </div> @@ -276,17 +283,36 @@ to SINGA for image classification.</p> + + + <script type="text/javascript"> + var DOCUMENTATION_OPTIONS = { + URL_ROOT:'../../../../../', + VERSION:'1.1.0', + LANGUAGE:'None', + COLLAPSE_INDEX:false, + FILE_SUFFIX:'.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt' + }; + </script> + <script type="text/javascript" src="../../../../../_static/jquery.js"></script> + <script type="text/javascript" src="../../../../../_static/underscore.js"></script> + <script type="text/javascript" src="../../../../../_static/doctools.js"></script> + + + + + + <script type="text/javascript" src="../../../../../_static/js/theme.js"></script> + + <script type="text/javascript"> jQuery(function () { SphinxRtdTheme.Navigation.enable(true); }); </script> - - - - - <div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions"> <span class="rst-current-version" data-toggle="rst-current-version"> <span class="fa fa-book"> incubator-singa </span>
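[Note: the VGG README diffed above submits images to the serving process with curl (`curl -i -F image=@image1.jpg http://localhost:9999/api`). The short Python sketch below is an illustrative equivalent only, not part of the committed files; it assumes the `requests` package is installed, that `serve.py` from the README is already running on localhost port 9999, and reuses the multipart field name "image" and the endpoint `/api` from the curl examples.]

    import requests

    # Endpoint used by the curl examples in the VGG README above.
    API_URL = "http://localhost:9999/api"

    def classify(path, url=API_URL):
        """Send one local image as a multipart upload and return the server's text response."""
        with open(path, "rb") as f:
            # Field name "image" mirrors `curl -F image=@...` from the README.
            response = requests.post(url, files={"image": f})
        response.raise_for_status()
        return response.text

    if __name__ == "__main__":
        # image1.jpg, image2.jpg and image3.jpg must exist locally, as the README notes.
        for name in ("image1.jpg", "image2.jpg", "image3.jpg"):
            print(name, classify(name))
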
Modified: incubator/singa/site/trunk/en/docs/model_zoo/examples/index.html URL: http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/index.html?rev=1862313&r1=1862312&r2=1862313&view=diff ============================================================================== --- incubator/singa/site/trunk/en/docs/model_zoo/examples/index.html (original) +++ incubator/singa/site/trunk/en/docs/model_zoo/examples/index.html Sat Jun 29 14:42:24 2019 @@ -18,21 +18,15 @@ - <script type="text/javascript" src="../../../_static/js/modernizr.min.js"></script> + + - <script type="text/javascript" id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script> - <script type="text/javascript" src="../../../_static/jquery.js"></script> - <script type="text/javascript" src="../../../_static/underscore.js"></script> - <script type="text/javascript" src="../../../_static/doctools.js"></script> - <script type="text/javascript" src="../../../_static/language_data.js"></script> - - <script type="text/javascript" src="../../../_static/js/theme.js"></script> - + - <link rel="stylesheet" href="../../../_static/css/theme.css" type="text/css" /> + <link rel="stylesheet" href="../../../_static/css/theme.css" type="text/css" /> <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" /> <link rel="index" title="Index" href="../../../genindex.html" /> <link rel="search" title="Search" href="../../../search.html" /> @@ -48,16 +42,21 @@ } </style> + + + <script src="../../../_static/js/modernizr.min.js"></script> + </head> <body class="wy-body-for-nav"> <div class="wy-grid-for-nav"> + <nav data-toggle="wy-nav-shift" class="wy-nav-side"> <div class="wy-side-scroll"> - <div class="wy-side-nav-search" > + <div class="wy-side-nav-search"> @@ -211,27 +210,32 @@ </li> </ul> </li> -<li class="toctree-l1"><a class="reference internal" href="imagenet/densenet/README.html">Image Classification using DenseNet</a><ul> +<li class="toctree-l1"><a class="reference internal" href="imagenet/densenet/README.html">â></a></li> +<li class="toctree-l1"><a class="reference internal" href="imagenet/densenet/README.html#image-classification-using-densenet">Image Classification using DenseNet</a><ul> <li class="toctree-l2"><a class="reference internal" href="imagenet/densenet/README.html#instructions">Instructions</a></li> <li class="toctree-l2"><a class="reference internal" href="imagenet/densenet/README.html#details">Details</a></li> </ul> </li> -<li class="toctree-l1"><a class="reference internal" href="imagenet/googlenet/README.html">Image Classification using GoogleNet</a><ul> +<li class="toctree-l1"><a class="reference internal" href="imagenet/googlenet/README.html">â></a></li> +<li class="toctree-l1"><a class="reference internal" href="imagenet/googlenet/README.html#image-classification-using-googlenet">Image Classification using GoogleNet</a><ul> <li class="toctree-l2"><a class="reference internal" href="imagenet/googlenet/README.html#instructions">Instructions</a></li> <li class="toctree-l2"><a class="reference internal" href="imagenet/googlenet/README.html#details">Details</a></li> </ul> </li> -<li class="toctree-l1"><a class="reference internal" href="imagenet/inception/README.html">Image Classification using Inception V4</a><ul> +<li class="toctree-l1"><a class="reference internal" href="imagenet/inception/README.html">â></a></li> +<li class="toctree-l1"><a class="reference internal" 
href="imagenet/inception/README.html#image-classification-using-inception-v4">Image Classification using Inception V4</a><ul> <li class="toctree-l2"><a class="reference internal" href="imagenet/inception/README.html#instructions">Instructions</a></li> <li class="toctree-l2"><a class="reference internal" href="imagenet/inception/README.html#details">Details</a></li> </ul> </li> -<li class="toctree-l1"><a class="reference internal" href="imagenet/resnet/README.html">Image Classification using Residual Networks</a><ul> +<li class="toctree-l1"><a class="reference internal" href="imagenet/resnet/README.html">â></a></li> +<li class="toctree-l1"><a class="reference internal" href="imagenet/resnet/README.html#image-classification-using-residual-networks">Image Classification using Residual Networks</a><ul> <li class="toctree-l2"><a class="reference internal" href="imagenet/resnet/README.html#instructions">Instructions</a></li> <li class="toctree-l2"><a class="reference internal" href="imagenet/resnet/README.html#details">Details</a></li> </ul> </li> -<li class="toctree-l1"><a class="reference internal" href="imagenet/vgg/README.html">Image Classification using VGG</a><ul> +<li class="toctree-l1"><a class="reference internal" href="imagenet/vgg/README.html">â></a></li> +<li class="toctree-l1"><a class="reference internal" href="imagenet/vgg/README.html#image-classification-using-vgg">Image Classification using VGG</a><ul> <li class="toctree-l2"><a class="reference internal" href="imagenet/vgg/README.html#instructions">Instructions</a></li> <li class="toctree-l2"><a class="reference internal" href="imagenet/vgg/README.html#details">Details</a></li> </ul> @@ -251,7 +255,7 @@ <div role="contentinfo"> <p> - © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners. + © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners.. 
</p> </div> @@ -268,17 +272,36 @@ + + + <script type="text/javascript"> + var DOCUMENTATION_OPTIONS = { + URL_ROOT:'../../../', + VERSION:'1.1.0', + LANGUAGE:'None', + COLLAPSE_INDEX:false, + FILE_SUFFIX:'.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt' + }; + </script> + <script type="text/javascript" src="../../../_static/jquery.js"></script> + <script type="text/javascript" src="../../../_static/underscore.js"></script> + <script type="text/javascript" src="../../../_static/doctools.js"></script> + + + + + + <script type="text/javascript" src="../../../_static/js/theme.js"></script> + + <script type="text/javascript"> jQuery(function () { SphinxRtdTheme.Navigation.enable(true); }); </script> - - - - - <div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions"> <span class="rst-current-version" data-toggle="rst-current-version"> <span class="fa fa-book"> incubator-singa </span> Modified: incubator/singa/site/trunk/en/docs/model_zoo/examples/mnist/README.html URL: http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/examples/mnist/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff ============================================================================== --- incubator/singa/site/trunk/en/docs/model_zoo/examples/mnist/README.html (original) +++ incubator/singa/site/trunk/en/docs/model_zoo/examples/mnist/README.html Sat Jun 29 14:42:24 2019 @@ -18,21 +18,15 @@ - <script type="text/javascript" src="../../../../_static/js/modernizr.min.js"></script> + + - <script type="text/javascript" id="documentation_options" data-url_root="../../../../" src="../../../../_static/documentation_options.js"></script> - <script type="text/javascript" src="../../../../_static/jquery.js"></script> - <script type="text/javascript" src="../../../../_static/underscore.js"></script> - <script type="text/javascript" src="../../../../_static/doctools.js"></script> - <script type="text/javascript" src="../../../../_static/language_data.js"></script> - - <script type="text/javascript" src="../../../../_static/js/theme.js"></script> - + - <link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" /> + <link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" /> <link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" /> <link rel="index" title="Index" href="../../../../genindex.html" /> <link rel="search" title="Search" href="../../../../search.html" /> @@ -48,16 +42,21 @@ } </style> + + + <script src="../../../../_static/js/modernizr.min.js"></script> + </head> <body class="wy-body-for-nav"> <div class="wy-grid-for-nav"> + <nav data-toggle="wy-nav-shift" class="wy-nav-side"> <div class="wy-side-scroll"> - <div class="wy-side-nav-search" > + <div class="wy-side-nav-search"> @@ -186,26 +185,28 @@ regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at + with the License. 
You may obtain a copy of the License at<div class="highlight-default notranslate"><div class="highlight"><pre><span></span> <span class="n">http</span><span class="p">:</span><span class="o">//</span><span class="n">www</span><span class="o">.</span><span class="n">apache</span><span class="o">.</span><span class="n">org</span><span class="o">/</span><span class="n">licenses</span><span class="o">/</span><span class="n">LICENSE</span><span class="o">-</span><span class="mf">2.0</span> - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. ---><div class="section" id="train-a-rbm-model-against-mnist-dataset"> -<h1>Train a RBM model against MNIST dataset<a class="headerlink" href="#train-a-rbm-model-against-mnist-dataset" title="Permalink to this headline">¶</a></h1> +<span class="n">Unless</span> <span class="n">required</span> <span class="n">by</span> <span class="n">applicable</span> <span class="n">law</span> <span class="ow">or</span> <span class="n">agreed</span> <span class="n">to</span> <span class="ow">in</span> <span class="n">writing</span><span class="p">,</span> +<span class="n">software</span> <span class="n">distributed</span> <span class="n">under</span> <span class="n">the</span> <span class="n">License</span> <span class="ow">is</span> <span class="n">distributed</span> <span class="n">on</span> <span class="n">an</span> +<span class="s2">"AS IS"</span> <span class="n">BASIS</span><span class="p">,</span> <span class="n">WITHOUT</span> <span class="n">WARRANTIES</span> <span class="n">OR</span> <span class="n">CONDITIONS</span> <span class="n">OF</span> <span class="n">ANY</span> +<span class="n">KIND</span><span class="p">,</span> <span class="n">either</span> <span class="n">express</span> <span class="ow">or</span> <span class="n">implied</span><span class="o">.</span> <span class="n">See</span> <span class="n">the</span> <span class="n">License</span> <span class="k">for</span> <span class="n">the</span> +<span class="n">specific</span> <span class="n">language</span> <span class="n">governing</span> <span class="n">permissions</span> <span class="ow">and</span> <span class="n">limitations</span> +<span class="n">under</span> <span class="n">the</span> <span class="n">License</span><span class="o">.</span> +</pre></div> +</div> +<p>â></p> +<div class="section" id="train-a-rbm-model-against-mnist-dataset"> +<span id="train-a-rbm-model-against-mnist-dataset"></span><h1>Train a RBM model against MNIST dataset<a class="headerlink" href="#train-a-rbm-model-against-mnist-dataset" title="Permalink to this headline">¶</a></h1> <p>This example is to train an RBM model using the MNIST dataset. 
The RBM model and its hyper-parameters are set following -<a class="reference external" href="http://www.cs.toronto.edu/%7Ehinton/science.pdf">Hinton's paper</a></p> +<a class="reference external" href="http://www.cs.toronto.edu/~hinton/science.pdf">Hinton's paper</a></p> <div class="section" id="running-instructions"> -<h2>Running instructions<a class="headerlink" href="#running-instructions" title="Permalink to this headline">¶</a></h2> +<span id="running-instructions"></span><h2>Running instructions<a class="headerlink" href="#running-instructions" title="Permalink to this headline">¶</a></h2> <ol> -<li><p>Download the pre-processed <a class="reference external" href="https://github.com/mnielsen/neural-networks-and-deep-learning/raw/master/data/mnist.pkl.gz">MNIST dataset</a></p></li> -<li><p>Start the training</p> +<li><p class="first">Download the pre-processed <a class="reference external" href="https://github.com/mnielsen/neural-networks-and-deep-learning/raw/master/data/mnist.pkl.gz">MNIST dataset</a></p> +</li> +<li><p class="first">Start the training</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> <span class="n">python</span> <span class="n">train</span><span class="o">.</span><span class="n">py</span> <span class="n">mnist</span><span class="o">.</span><span class="n">pkl</span><span class="o">.</span><span class="n">gz</span> </pre></div> </div> @@ -230,7 +231,7 @@ the program with an additional argument< <div role="contentinfo"> <p> - © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners. + © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners..
</p> </div> @@ -247,17 +248,36 @@ the program with an additional argument< + + + <script type="text/javascript"> + var DOCUMENTATION_OPTIONS = { + URL_ROOT:'../../../../', + VERSION:'1.1.0', + LANGUAGE:'None', + COLLAPSE_INDEX:false, + FILE_SUFFIX:'.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt' + }; + </script> + <script type="text/javascript" src="../../../../_static/jquery.js"></script> + <script type="text/javascript" src="../../../../_static/underscore.js"></script> + <script type="text/javascript" src="../../../../_static/doctools.js"></script> + + + + + + <script type="text/javascript" src="../../../../_static/js/theme.js"></script> + + <script type="text/javascript"> jQuery(function () { SphinxRtdTheme.Navigation.enable(true); }); </script> - - - - - <div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions"> <span class="rst-current-version" data-toggle="rst-current-version"> <span class="fa fa-book"> incubator-singa </span> Modified: incubator/singa/site/trunk/en/docs/model_zoo/imagenet/alexnet/README.html URL: http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/imagenet/alexnet/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff ============================================================================== --- incubator/singa/site/trunk/en/docs/model_zoo/imagenet/alexnet/README.html (original) +++ incubator/singa/site/trunk/en/docs/model_zoo/imagenet/alexnet/README.html Sat Jun 29 14:42:24 2019 @@ -18,26 +18,20 @@ - <script type="text/javascript" src="../../../../_static/js/modernizr.min.js"></script> + + - <script type="text/javascript" id="documentation_options" data-url_root="../../../../" src="../../../../_static/documentation_options.js"></script> - <script type="text/javascript" src="../../../../_static/jquery.js"></script> - <script type="text/javascript" src="../../../../_static/underscore.js"></script> - <script type="text/javascript" src="../../../../_static/doctools.js"></script> - <script type="text/javascript" src="../../../../_static/language_data.js"></script> - - <script type="text/javascript" src="../../../../_static/js/theme.js"></script> - + - <link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" /> + <link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" /> <link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" /> <link rel="index" title="Index" href="../../../../genindex.html" /> <link rel="search" title="Search" href="../../../../search.html" /> - <link rel="next" title="Image Classification using DenseNet" href="../densenet/README.html" /> - <link rel="prev" title="Train a RBM model against MNIST dataset" href="../../mnist/README.html" /> + <link rel="next" title="Image Classification using GoogleNet" href="../googlenet/README.html" /> + <link rel="prev" title="Train Char-RNN over plain text" href="../../char-rnn/README.html" /> <link href="../../../../_static/style.css" rel="stylesheet" type="text/css"> <!--link href="../../../../_static/fontawesome-all.min.css" rel="stylesheet" type="text/css"--> <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.0.13/css/all.css" integrity="sha384-DNOHZ68U8hZfKXOrtjWvjxusGo9WQnrNx2sqG0tfsghAvtVlRW3tvkXWZh58N9jp" crossorigin="anonymous"> @@ -50,16 +44,21 @@ } </style> + + + <script src="../../../../_static/js/modernizr.min.js"></script> + </head> <body class="wy-body-for-nav"> <div class="wy-grid-for-nav"> + <nav data-toggle="wy-nav-shift" class="wy-nav-side"> <div 
class="wy-side-scroll"> - <div class="wy-side-nav-search" > + <div class="wy-side-nav-search"> @@ -104,7 +103,6 @@ <li class="toctree-l1 current"><a class="reference internal" href="../../../index.html">Documentation</a><ul class="current"> <li class="toctree-l2"><a class="reference internal" href="../../../installation.html">Installation</a></li> <li class="toctree-l2"><a class="reference internal" href="../../../software_stack.html">Software Stack</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../benchmark.html">Benchmark for Distributed training</a></li> <li class="toctree-l2"><a class="reference internal" href="../../../device.html">Device</a></li> <li class="toctree-l2"><a class="reference internal" href="../../../tensor.html">Tensor</a></li> <li class="toctree-l2"><a class="reference internal" href="../../../layer.html">Layer</a></li> @@ -122,16 +120,11 @@ <li class="toctree-l2 current"><a class="reference internal" href="../../index.html">Model Zoo</a><ul class="current"> <li class="toctree-l3"><a class="reference internal" href="../../cifar10/README.html">Train CNN over Cifar-10</a></li> <li class="toctree-l3"><a class="reference internal" href="../../char-rnn/README.html">Train Char-RNN over plain text</a></li> -<li class="toctree-l3"><a class="reference internal" href="../../mnist/README.html">Train a RBM model against MNIST dataset</a></li> <li class="toctree-l3 current"><a class="current reference internal" href="#">Train AlexNet over ImageNet</a><ul> <li class="toctree-l4"><a class="reference internal" href="#instructions">Instructions</a></li> </ul> </li> -<li class="toctree-l3"><a class="reference internal" href="../densenet/README.html">Image Classification using DenseNet</a></li> <li class="toctree-l3"><a class="reference internal" href="../googlenet/README.html">Image Classification using GoogleNet</a></li> -<li class="toctree-l3"><a class="reference internal" href="../inception/README.html">Image Classification using Inception V4</a></li> -<li class="toctree-l3"><a class="reference internal" href="../resnet/README.html">Image Classification using Residual Networks</a></li> -<li class="toctree-l3"><a class="reference internal" href="../vgg/README.html">Image Classification using VGG</a></li> </ul> </li> <li class="toctree-l2"><a class="reference internal" href="../../../security.html">Security</a></li> @@ -220,51 +213,34 @@ <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article"> <div itemprop="articleBody"> - <!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. 
---><div class="section" id="train-alexnet-over-imagenet"> -<h1>Train AlexNet over ImageNet<a class="headerlink" href="#train-alexnet-over-imagenet" title="Permalink to this headline">¶</a></h1> + <div class="section" id="train-alexnet-over-imagenet"> +<span id="train-alexnet-over-imagenet"></span><h1>Train AlexNet over ImageNet<a class="headerlink" href="#train-alexnet-over-imagenet" title="Permalink to this headline">¶</a></h1> <p>Convolution neural network (CNN) is a type of feed-forward neural network widely used for image and video classification. In this example, we will use a <a class="reference external" href="http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks">deep CNN model</a> to do image classification against the ImageNet dataset.</p> <div class="section" id="instructions"> -<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to this headline">¶</a></h2> +<span id="instructions"></span><h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to this headline">¶</a></h2> <div class="section" id="compile-singa"> -<h3>Compile SINGA<a class="headerlink" href="#compile-singa" title="Permalink to this headline">¶</a></h3> +<span id="compile-singa"></span><h3>Compile SINGA<a class="headerlink" href="#compile-singa" title="Permalink to this headline">¶</a></h3> <p>Please compile SINGA with CUDA, CUDNN and OpenCV. You can manually turn on the options in CMakeLists.txt or run <code class="docutils literal notranslate"><span class="pre">ccmake</span> <span class="pre">..</span></code> in build/ folder.</p> <p>We have tested CUDNN V4 and V5 (V5 requires CUDA 7.5)</p> </div> <div class="section" id="data-download"> -<h3>Data download<a class="headerlink" href="#data-download" title="Permalink to this headline">¶</a></h3> +<span id="data-download"></span><h3>Data download<a class="headerlink" href="#data-download" title="Permalink to this headline">¶</a></h3> <ul class="simple"> -<li><p>Please refer to step1-3 on <a class="reference external" href="https://github.com/amd/OpenCL-caffe/wiki/Instructions-to-create-ImageNet-2012-data">Instructions to create ImageNet 2012 data</a> -to download and decompress the data.</p></li> -<li><p>You can download the training and validation list by +<li>Please refer to step1-3 on <a class="reference external" href="https://github.com/amd/OpenCL-caffe/wiki/Instructions-to-create-ImageNet-2012-data">Instructions to create ImageNet 2012 data</a> +to download and decompress the data.</li> +<li>You can download the training and validation list by <a class="reference external" href="https://github.com/BVLC/caffe/blob/master/data/ilsvrc12/get_ilsvrc_aux.sh">get_ilsvrc_aux.sh</a> -or from <a class="reference external" href="http://www.image-net.org/download-images">Imagenet</a>.</p></li> +or from <a class="reference external" href="http://www.image-net.org/download-images">Imagenet</a>.</li> </ul> </div> <div class="section" id="data-preprocessing"> -<h3>Data preprocessing<a class="headerlink" href="#data-preprocessing" title="Permalink to this headline">¶</a></h3> +<span id="data-preprocessing"></span><h3>Data preprocessing<a class="headerlink" href="#data-preprocessing" title="Permalink to this headline">¶</a></h3> <ul> -<li><p>Assuming you have downloaded the data and the list. +<li><p class="first">Assuming you have downloaded the data and the list. Now we should transform the data into binary files. 
You can run:</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> <span class="n">sh</span> <span class="n">create_data</span><span class="o">.</span><span class="n">sh</span> </pre></div> @@ -272,41 +248,41 @@ Now we should transform the data into bi <p>The script will generate a test file(<code class="docutils literal notranslate"><span class="pre">test.bin</span></code>), a mean file(<code class="docutils literal notranslate"><span class="pre">mean.bin</span></code>) and several training files(<code class="docutils literal notranslate"><span class="pre">trainX.bin</span></code>) in the specified output folder.</p> </li> -<li><p>You can also change the parameters in <code class="docutils literal notranslate"><span class="pre">create_data.sh</span></code>.</p> +<li><p class="first">You can also change the parameters in <code class="docutils literal notranslate"><span class="pre">create_data.sh</span></code>.</p> <ul class="simple"> -<li><p><code class="docutils literal notranslate"><span class="pre">-trainlist</span> <span class="pre"><file></span></code>: the file of training list;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-trainfolder</span> <span class="pre"><folder></span></code>: the folder of training images;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-testlist</span> <span class="pre"><file></span></code>: the file of test list;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-testfolder</span> <span class="pre"><floder></span></code>: the folder of test images;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-outdata</span> <span class="pre"><folder></span></code>: the folder to save output files, including mean, training and test files. -The script will generate these files in the specified folder;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-filesize</span> <span class="pre"><int></span></code>: number of training images that stores in each binary file.</p></li> +<li><code class="docutils literal notranslate"><span class="pre">-trainlist</span> <span class="pre"><file></span></code>: the file of training list;</li> +<li><code class="docutils literal notranslate"><span class="pre">-trainfolder</span> <span class="pre"><folder></span></code>: the folder of training images;</li> +<li><code class="docutils literal notranslate"><span class="pre">-testlist</span> <span class="pre"><file></span></code>: the file of test list;</li> +<li><code class="docutils literal notranslate"><span class="pre">-testfolder</span> <span class="pre"><floder></span></code>: the folder of test images;</li> +<li><code class="docutils literal notranslate"><span class="pre">-outdata</span> <span class="pre"><folder></span></code>: the folder to save output files, including mean, training and test files. 
+The script will generate these files in the specified folder;</li> +<li><code class="docutils literal notranslate"><span class="pre">-filesize</span> <span class="pre"><int></span></code>: number of training images that stores in each binary file.</li> </ul> </li> </ul> </div> <div class="section" id="training"> -<h3>Training<a class="headerlink" href="#training" title="Permalink to this headline">¶</a></h3> +<span id="training"></span><h3>Training<a class="headerlink" href="#training" title="Permalink to this headline">¶</a></h3> <ul> -<li><p>After preparing data, you can run the following command to train the Alexnet model.</p> +<li><p class="first">After preparing data, you can run the following command to train the Alexnet model.</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> <span class="n">sh</span> <span class="n">run</span><span class="o">.</span><span class="n">sh</span> </pre></div> </div> </li> -<li><p>You may change the parameters in <code class="docutils literal notranslate"><span class="pre">run.sh</span></code>.</p> +<li><p class="first">You may change the parameters in <code class="docutils literal notranslate"><span class="pre">run.sh</span></code>.</p> <ul class="simple"> -<li><p><code class="docutils literal notranslate"><span class="pre">-epoch</span> <span class="pre"><int></span></code>: number of epoch to be trained, default is 90;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-lr</span> <span class="pre"><float></span></code>: base learning rate, the learning rate will decrease each 20 epochs, -more specifically, <code class="docutils literal notranslate"><span class="pre">lr</span> <span class="pre">=</span> <span class="pre">lr</span> <span class="pre">*</span> <span class="pre">exp(0.1</span> <span class="pre">*</span> <span class="pre">(epoch</span> <span class="pre">/</span> <span class="pre">20))</span></code>;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-batchsize</span> <span class="pre"><int></span></code>: batchsize, it should be changed regarding to your memory;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-filesize</span> <span class="pre"><int></span></code>: number of training images that stores in each binary file, it is the -same as the <code class="docutils literal notranslate"><span class="pre">filesize</span></code> in data preprocessing;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-ntrain</span> <span class="pre"><int></span></code>: number of training images;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-ntest</span> <span class="pre"><int></span></code>: number of test images;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-data</span> <span class="pre"><folder></span></code>: the folder which stores the binary files, it is exactly the output -folder in data preprocessing step;</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-pfreq</span> <span class="pre"><int></span></code>: the frequency(in batch) of printing current model status(loss and accuracy);</p></li> -<li><p><code class="docutils literal notranslate"><span class="pre">-nthreads</span> <span class="pre"><int></span></code>: the number of threads to load data which feed to the model.</p></li> +<li><code class="docutils literal notranslate"><span class="pre">-epoch</span> <span class="pre"><int></span></code>: number of 
epoch to be trained, default is 90;</li> +<li><code class="docutils literal notranslate"><span class="pre">-lr</span> <span class="pre"><float></span></code>: base learning rate, the learning rate will decrease each 20 epochs, +more specifically, <code class="docutils literal notranslate"><span class="pre">lr</span> <span class="pre">=</span> <span class="pre">lr</span> <span class="pre">*</span> <span class="pre">exp(0.1</span> <span class="pre">*</span> <span class="pre">(epoch</span> <span class="pre">/</span> <span class="pre">20))</span></code>;</li> +<li><code class="docutils literal notranslate"><span class="pre">-batchsize</span> <span class="pre"><int></span></code>: batchsize, it should be changed regarding to your memory;</li> +<li><code class="docutils literal notranslate"><span class="pre">-filesize</span> <span class="pre"><int></span></code>: number of training images that stores in each binary file, it is the +same as the <code class="docutils literal notranslate"><span class="pre">filesize</span></code> in data preprocessing;</li> +<li><code class="docutils literal notranslate"><span class="pre">-ntrain</span> <span class="pre"><int></span></code>: number of training images;</li> +<li><code class="docutils literal notranslate"><span class="pre">-ntest</span> <span class="pre"><int></span></code>: number of test images;</li> +<li><code class="docutils literal notranslate"><span class="pre">-data</span> <span class="pre"><folder></span></code>: the folder which stores the binary files, it is exactly the output +folder in data preprocessing step;</li> +<li><code class="docutils literal notranslate"><span class="pre">-pfreq</span> <span class="pre"><int></span></code>: the frequency(in batch) of printing current model status(loss and accuracy);</li> +<li><code class="docutils literal notranslate"><span class="pre">-nthreads</span> <span class="pre"><int></span></code>: the number of threads to load data which feed to the model.</li> </ul> </li> </ul> @@ -322,10 +298,10 @@ folder in data preprocessing step;</p></ <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation"> - <a href="../densenet/README.html" class="btn btn-neutral float-right" title="Image Classification using DenseNet" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right"></span></a> + <a href="../googlenet/README.html" class="btn btn-neutral float-right" title="Image Classification using GoogleNet" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right"></span></a> - <a href="../../mnist/README.html" class="btn btn-neutral float-left" title="Train a RBM model against MNIST dataset" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left"></span> Previous</a> + <a href="../../char-rnn/README.html" class="btn btn-neutral" title="Train Char-RNN over plain text" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left"></span> Previous</a> </div> @@ -334,7 +310,7 @@ folder in data preprocessing step;</p></ <div role="contentinfo"> <p> - © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners. + © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. 
All other marks mentioned may be trademarks or registered trademarks of their respective owners.. </p> </div> @@ -351,17 +327,36 @@ folder in data preprocessing step;</p></ + + + <script type="text/javascript"> + var DOCUMENTATION_OPTIONS = { + URL_ROOT:'../../../../', + VERSION:'1.1.0', + LANGUAGE:'None', + COLLAPSE_INDEX:false, + FILE_SUFFIX:'.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt' + }; + </script> + <script type="text/javascript" src="../../../../_static/jquery.js"></script> + <script type="text/javascript" src="../../../../_static/underscore.js"></script> + <script type="text/javascript" src="../../../../_static/doctools.js"></script> + + + + + + <script type="text/javascript" src="../../../../_static/js/theme.js"></script> + + <script type="text/javascript"> jQuery(function () { SphinxRtdTheme.Navigation.enable(true); }); </script> - - - - - <div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions"> <span class="rst-current-version" data-toggle="rst-current-version"> <span class="fa fa-book"> incubator-singa </span> Modified: incubator/singa/site/trunk/en/docs/model_zoo/imagenet/googlenet/README.html URL: http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/imagenet/googlenet/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff ============================================================================== --- incubator/singa/site/trunk/en/docs/model_zoo/imagenet/googlenet/README.html (original) +++ incubator/singa/site/trunk/en/docs/model_zoo/imagenet/googlenet/README.html Sat Jun 29 14:42:24 2019 @@ -18,26 +18,20 @@ - <script type="text/javascript" src="../../../../_static/js/modernizr.min.js"></script> + + - <script type="text/javascript" id="documentation_options" data-url_root="../../../../" src="../../../../_static/documentation_options.js"></script> - <script type="text/javascript" src="../../../../_static/jquery.js"></script> - <script type="text/javascript" src="../../../../_static/underscore.js"></script> - <script type="text/javascript" src="../../../../_static/doctools.js"></script> - <script type="text/javascript" src="../../../../_static/language_data.js"></script> - - <script type="text/javascript" src="../../../../_static/js/theme.js"></script> - + - <link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" /> + <link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" /> <link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" /> <link rel="index" title="Index" href="../../../../genindex.html" /> <link rel="search" title="Search" href="../../../../search.html" /> - <link rel="next" title="Image Classification using Inception V4" href="../inception/README.html" /> - <link rel="prev" title="Image Classification using DenseNet" href="../densenet/README.html" /> + <link rel="next" title="Security" href="../../../security.html" /> + <link rel="prev" title="Train AlexNet over ImageNet" href="../alexnet/README.html" /> <link href="../../../../_static/style.css" rel="stylesheet" type="text/css"> <!--link href="../../../../_static/fontawesome-all.min.css" rel="stylesheet" type="text/css"--> <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.0.13/css/all.css" integrity="sha384-DNOHZ68U8hZfKXOrtjWvjxusGo9WQnrNx2sqG0tfsghAvtVlRW3tvkXWZh58N9jp" crossorigin="anonymous"> @@ -50,16 +44,21 @@ } </style> + + + <script src="../../../../_static/js/modernizr.min.js"></script> + </head> <body class="wy-body-for-nav"> <div 
class="wy-grid-for-nav"> + <nav data-toggle="wy-nav-shift" class="wy-nav-side"> <div class="wy-side-scroll"> - <div class="wy-side-nav-search" > + <div class="wy-side-nav-search"> @@ -104,7 +103,6 @@ <li class="toctree-l1 current"><a class="reference internal" href="../../../index.html">Documentation</a><ul class="current"> <li class="toctree-l2"><a class="reference internal" href="../../../installation.html">Installation</a></li> <li class="toctree-l2"><a class="reference internal" href="../../../software_stack.html">Software Stack</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../benchmark.html">Benchmark for Distributed training</a></li> <li class="toctree-l2"><a class="reference internal" href="../../../device.html">Device</a></li> <li class="toctree-l2"><a class="reference internal" href="../../../tensor.html">Tensor</a></li> <li class="toctree-l2"><a class="reference internal" href="../../../layer.html">Layer</a></li> @@ -122,17 +120,12 @@ <li class="toctree-l2 current"><a class="reference internal" href="../../index.html">Model Zoo</a><ul class="current"> <li class="toctree-l3"><a class="reference internal" href="../../cifar10/README.html">Train CNN over Cifar-10</a></li> <li class="toctree-l3"><a class="reference internal" href="../../char-rnn/README.html">Train Char-RNN over plain text</a></li> -<li class="toctree-l3"><a class="reference internal" href="../../mnist/README.html">Train a RBM model against MNIST dataset</a></li> <li class="toctree-l3"><a class="reference internal" href="../alexnet/README.html">Train AlexNet over ImageNet</a></li> -<li class="toctree-l3"><a class="reference internal" href="../densenet/README.html">Image Classification using DenseNet</a></li> <li class="toctree-l3 current"><a class="current reference internal" href="#">Image Classification using GoogleNet</a><ul> <li class="toctree-l4"><a class="reference internal" href="#instructions">Instructions</a></li> <li class="toctree-l4"><a class="reference internal" href="#details">Details</a></li> </ul> </li> -<li class="toctree-l3"><a class="reference internal" href="../inception/README.html">Image Classification using Inception V4</a></li> -<li class="toctree-l3"><a class="reference internal" href="../resnet/README.html">Image Classification using Residual Networks</a></li> -<li class="toctree-l3"><a class="reference internal" href="../vgg/README.html">Image Classification using VGG</a></li> </ul> </li> <li class="toctree-l2"><a class="reference internal" href="../../../security.html">Security</a></li> @@ -221,36 +214,27 @@ <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article"> <div itemprop="articleBody"> - <!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. 
---><div class="section" id="image-classification-using-googlenet"> -<h1>Image Classification using GoogleNet<a class="headerlink" href="#image-classification-using-googlenet" title="Permalink to this headline">¶</a></h1> -<p>In this example, we convert GoogleNet trained on Caffe to SINGA for image classification. Tested on <a class="reference external" href="8c990f7da2de220e8a012c6a8ecc897dc7532744">SINGA commit</a> with <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz">the parameters</a>.</p> + <hr class="docutils" /> +<p>name: GoogleNet on ImageNet +SINGA version: 1.0.1 +SINGA commit: 8c990f7da2de220e8a012c6a8ecc897dc7532744 +parameter_url: https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz +parameter_sha1: 0a88e8948b1abca3badfd8d090d6be03f8d7655d +license: unrestricted https://github.com/BVLC/caffe/tree/master/models/bvlc_googlenet</p> +<hr class="docutils" /> +<div class="section" id="image-classification-using-googlenet"> +<span id="image-classification-using-googlenet"></span><h1>Image Classification using GoogleNet<a class="headerlink" href="#image-classification-using-googlenet" title="Permalink to this headline">¶</a></h1> +<p>In this example, we convert GoogleNet trained on Caffe to SINGA for image classification.</p> <div class="section" id="instructions"> -<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to this headline">¶</a></h2> +<span id="instructions"></span><h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to this headline">¶</a></h2> <ul> -<li><p>Download the parameter checkpoint file into this folder</p> +<li><p class="first">Download the parameter checkpoint file into this folder</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> $ wget https://s3-ap-southeast-1.amazonaws.com/dlfile/bvlc_googlenet.tar.gz $ tar xvf bvlc_googlenet.tar.gz </pre></div> </div> </li> -<li><p>Run the program</p> +<li><p class="first">Run the program</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> # use cpu $ python serve.py -C & # use gpu @@ -258,7 +242,7 @@ </pre></div> </div> </li> -<li><p>Submit images for classification</p> +<li><p class="first">Submit images for classification</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> $ curl -i -F [email protected] http://localhost:9999/api $ curl -i -F [email protected] http://localhost:9999/api $ curl -i -F [email protected] http://localhost:9999/api @@ -269,7 +253,7 @@ <p>image1.jpg, image2.jpg and image3.jpg should be downloaded before executing the above commands.</p> </div> <div class="section" id="details"> -<h2>Details<a class="headerlink" href="#details" title="Permalink to this headline">¶</a></h2> +<span id="details"></span><h2>Details<a class="headerlink" href="#details" title="Permalink to this headline">¶</a></h2> <p>We first extract the parameter values from <a class="reference external" href="http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel">Caffeâs checkpoint file</a> into a pickle version After downloading the checkpoint file into <code class="docutils literal notranslate"><span class="pre">caffe_root/python</span></code> folder, run the following script</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="c1"># to be executed within caffe_root/python folder</span> @@ -309,10 +293,10 @@ Refer to <a class="reference external" h 
<div class="rst-footer-buttons" role="navigation" aria-label="footer navigation"> - <a href="../inception/README.html" class="btn btn-neutral float-right" title="Image Classification using Inception V4" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right"></span></a> + <a href="../../../security.html" class="btn btn-neutral float-right" title="Security" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right"></span></a> - <a href="../densenet/README.html" class="btn btn-neutral float-left" title="Image Classification using DenseNet" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left"></span> Previous</a> + <a href="../alexnet/README.html" class="btn btn-neutral" title="Train AlexNet over ImageNet" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left"></span> Previous</a> </div> @@ -321,7 +305,7 @@ Refer to <a class="reference external" h <div role="contentinfo"> <p> - © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners. + © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners.. </p> </div> @@ -338,17 +322,36 @@ Refer to <a class="reference external" h + + + <script type="text/javascript"> + var DOCUMENTATION_OPTIONS = { + URL_ROOT:'../../../../', + VERSION:'1.1.0', + LANGUAGE:'None', + COLLAPSE_INDEX:false, + FILE_SUFFIX:'.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt' + }; + </script> + <script type="text/javascript" src="../../../../_static/jquery.js"></script> + <script type="text/javascript" src="../../../../_static/underscore.js"></script> + <script type="text/javascript" src="../../../../_static/doctools.js"></script> + + + + + + <script type="text/javascript" src="../../../../_static/js/theme.js"></script> + + <script type="text/javascript"> jQuery(function () { SphinxRtdTheme.Navigation.enable(true); }); </script> - - - - - <div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions"> <span class="rst-current-version" data-toggle="rst-current-version"> <span class="fa fa-book"> incubator-singa </span> Modified: incubator/singa/site/trunk/en/docs/model_zoo/imagenet/inception/README.html URL: http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/model_zoo/imagenet/inception/README.html?rev=1862313&r1=1862312&r2=1862313&view=diff ============================================================================== --- incubator/singa/site/trunk/en/docs/model_zoo/imagenet/inception/README.html (original) +++ incubator/singa/site/trunk/en/docs/model_zoo/imagenet/inception/README.html Sat Jun 29 14:42:24 2019 @@ -18,26 +18,18 @@ - <script type="text/javascript" src="../../../../_static/js/modernizr.min.js"></script> + + - <script type="text/javascript" id="documentation_options" data-url_root="../../../../" src="../../../../_static/documentation_options.js"></script> - <script type="text/javascript" src="../../../../_static/jquery.js"></script> - <script type="text/javascript" src="../../../../_static/underscore.js"></script> - <script type="text/javascript" src="../../../../_static/doctools.js"></script> - <script 
type="text/javascript" src="../../../../_static/language_data.js"></script> - - <script type="text/javascript" src="../../../../_static/js/theme.js"></script> - + - <link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" /> + <link rel="stylesheet" href="../../../../_static/css/theme.css" type="text/css" /> <link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" /> <link rel="index" title="Index" href="../../../../genindex.html" /> <link rel="search" title="Search" href="../../../../search.html" /> - <link rel="next" title="Image Classification using Residual Networks" href="../resnet/README.html" /> - <link rel="prev" title="Image Classification using GoogleNet" href="../googlenet/README.html" /> <link href="../../../../_static/style.css" rel="stylesheet" type="text/css"> <!--link href="../../../../_static/fontawesome-all.min.css" rel="stylesheet" type="text/css"--> <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.0.13/css/all.css" integrity="sha384-DNOHZ68U8hZfKXOrtjWvjxusGo9WQnrNx2sqG0tfsghAvtVlRW3tvkXWZh58N9jp" crossorigin="anonymous"> @@ -50,16 +42,21 @@ } </style> + + + <script src="../../../../_static/js/modernizr.min.js"></script> + </head> <body class="wy-body-for-nav"> <div class="wy-grid-for-nav"> + <nav data-toggle="wy-nav-shift" class="wy-nav-side"> <div class="wy-side-scroll"> - <div class="wy-side-nav-search" > + <div class="wy-side-nav-search"> @@ -100,44 +97,8 @@ - <ul class="current"> -<li class="toctree-l1 current"><a class="reference internal" href="../../../index.html">Documentation</a><ul class="current"> -<li class="toctree-l2"><a class="reference internal" href="../../../installation.html">Installation</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../software_stack.html">Software Stack</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../benchmark.html">Benchmark for Distributed training</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../device.html">Device</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../tensor.html">Tensor</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../layer.html">Layer</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../net.html">FeedForward Net</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../initializer.html">Initializer</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../loss.html">Loss</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../metric.html">Metric</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../optimizer.html">Optimizer</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../autograd.html">Autograd in Singa</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../data.html">Data</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../image_tool.html">Image Tool</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../snapshot.html">Snapshot</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../converter.html">Caffe Converter</a></li> -<li class="toctree-l2"><a class="reference internal" href="../../../utils.html">Utils</a></li> -<li class="toctree-l2 current"><a class="reference internal" href="../../index.html">Model Zoo</a><ul class="current"> -<li class="toctree-l3"><a 
class="reference internal" href="../../cifar10/README.html">Train CNN over Cifar-10</a></li> -<li class="toctree-l3"><a class="reference internal" href="../../char-rnn/README.html">Train Char-RNN over plain text</a></li> -<li class="toctree-l3"><a class="reference internal" href="../../mnist/README.html">Train a RBM model against MNIST dataset</a></li> -<li class="toctree-l3"><a class="reference internal" href="../alexnet/README.html">Train AlexNet over ImageNet</a></li> -<li class="toctree-l3"><a class="reference internal" href="../densenet/README.html">Image Classification using DenseNet</a></li> -<li class="toctree-l3"><a class="reference internal" href="../googlenet/README.html">Image Classification using GoogleNet</a></li> -<li class="toctree-l3 current"><a class="current reference internal" href="#">Image Classification using Inception V4</a><ul> -<li class="toctree-l4"><a class="reference internal" href="#instructions">Instructions</a></li> -<li class="toctree-l4"><a class="reference internal" href="#details">Details</a></li> -</ul> -</li> -<li class="toctree-l3"><a class="reference internal" href="../resnet/README.html">Image Classification using Residual Networks</a></li> -<li class="toctree-l3"><a class="reference internal" href="../vgg/README.html">Image Classification using VGG</a></li> -</ul> -</li> -<li class="toctree-l2"><a class="reference internal" href="../../../security.html">Security</a></li> -</ul> -</li> + <ul> +<li class="toctree-l1"><a class="reference internal" href="../../../index.html">Documentation</a></li> <li class="toctree-l1"><a class="reference internal" href="../../../../downloads.html">Download SINGA</a></li> </ul> <p class="caption"><span class="caption-text">Development</span></p> @@ -200,10 +161,6 @@ <li><a href="../../../../index.html">Docs</a> »</li> - <li><a href="../../../index.html">Documentation</a> »</li> - - <li><a href="../../index.html">Model Zoo</a> »</li> - <li>Image Classification using Inception V4</li> @@ -221,37 +178,29 @@ <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article"> <div itemprop="articleBody"> - <!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. ---><div class="section" id="image-classification-using-inception-v4"> -<h1>Image Classification using Inception V4<a class="headerlink" href="#image-classification-using-inception-v4" title="Permalink to this headline">¶</a></h1> -<p>In this example, we convert Inception V4 trained on Tensorflow to SINGA for image classification. 
Tested on SINGA version 1.1.1 with <a class="reference external" href="https://s3-ap-southeast-1.amazonaws.com/dlfile/inception_v4.tar.gz">parameters pretrained by tensorflow</a>.</p> + <hr class="docutils" /> +<p>name: Inception V4 on ImageNet +SINGA version: 1.1.1 +SINGA commit: +parameter_url: https://s3-ap-southeast-1.amazonaws.com/dlfile/inception_v4.tar.gz +parameter_sha1: 5fdd6f5d8af8fd10e7321d9b38bb87ef14e80d56 +license: https://github.com/tensorflow/models/tree/master/slim</p> +<hr class="docutils" /> +<div class="section" id="image-classification-using-inception-v4"> +<span id="image-classification-using-inception-v4"></span><h1>Image Classification using Inception V4<a class="headerlink" href="#image-classification-using-inception-v4" title="Permalink to this headline">¶</a></h1> +<p>In this example, we convert Inception V4 trained on Tensorflow to SINGA for image classification.</p> <div class="section" id="instructions"> -<h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to this headline">¶</a></h2> +<span id="instructions"></span><h2>Instructions<a class="headerlink" href="#instructions" title="Permalink to this headline">¶</a></h2> <ul> -<li><p>Download the parameter checkpoint file</p> +<li><p class="first">Download the parameter checkpoint file</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> $ wget $ tar xvf inception_v4.tar.gz </pre></div> </div> </li> -<li><p>Download <a class="reference external" href="https://github.com/BVLC/caffe/blob/master/data/ilsvrc12/get_ilsvrc_aux.sh">synset_word.txt</a> file.</p></li> -<li><p>Run the program</p> +<li><p class="first">Download <a class="reference external" href="https://github.com/BVLC/caffe/blob/master/data/ilsvrc12/get_ilsvrc_aux.sh">synset_word.txt</a> file.</p> +</li> +<li><p class="first">Run the program</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> # use cpu $ python serve.py -C & # use gpu @@ -259,7 +208,7 @@ </pre></div> </div> </li> -<li><p>Submit images for classification</p> +<li><p class="first">Submit images for classification</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span> $ curl -i -F image=@image1.jpg http://localhost:9999/api $ curl -i -F image=@image2.jpg http://localhost:9999/api $ curl -i -F image=@image3.jpg http://localhost:9999/api @@ -270,7 +219,7 @@ <p>image1.jpg, image2.jpg and image3.jpg should be downloaded before executing the above commands.</p> </div> <div class="section" id="details"> -<h2>Details<a class="headerlink" href="#details" title="Permalink to this headline">¶</a></h2> +<span id="details"></span><h2>Details<a class="headerlink" href="#details" title="Permalink to this headline">¶</a></h2> <p>We first extract the parameter values from <a class="reference external" href="http://download.tensorflow.org/models/inception_v4_2016_09_09.tar.gz">Tensorflow’s checkpoint file</a> into a pickle version.
After downloading and decompressing the checkpoint file, run the following script</p> <div class="highlight-default notranslate"><div class="highlight"><pre><span></span>$ python convert.py --file_name=inception_v4.ckpt @@ -285,21 +234,12 @@ After downloading and decompressing the </div> <footer> - <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation"> - - <a href="../resnet/README.html" class="btn btn-neutral float-right" title="Image Classification using Residual Networks" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right"></span></a> - - - <a href="../googlenet/README.html" class="btn btn-neutral float-left" title="Image Classification using GoogleNet" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left"></span> Previous</a> - - </div> - <hr/> <div role="contentinfo"> <p> - © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners. + © Copyright 2019 The Apache Software Foundation. All rights reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA project logos are trademarks of The Apache Software Foundation. All other marks mentioned may be trademarks or registered trademarks of their respective owners.. </p> </div> @@ -316,17 +256,36 @@ After downloading and decompressing the + + + <script type="text/javascript"> + var DOCUMENTATION_OPTIONS = { + URL_ROOT:'../../../../', + VERSION:'1.1.0', + LANGUAGE:'None', + COLLAPSE_INDEX:false, + FILE_SUFFIX:'.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt' + }; + </script> + <script type="text/javascript" src="../../../../_static/jquery.js"></script> + <script type="text/javascript" src="../../../../_static/underscore.js"></script> + <script type="text/javascript" src="../../../../_static/doctools.js"></script> + + + + + + <script type="text/javascript" src="../../../../_static/js/theme.js"></script> + + <script type="text/javascript"> jQuery(function () { SphinxRtdTheme.Navigation.enable(true); }); </script> - - - - - <div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions"> <span class="rst-current-version" data-toggle="rst-current-version"> <span class="fa fa-book"> incubator-singa </span>
