Added: incubator/singa/site/trunk/en/docs/layer.html
URL: 
http://svn.apache.org/viewvc/incubator/singa/site/trunk/en/docs/layer.html?rev=1858059&view=auto
==============================================================================
--- incubator/singa/site/trunk/en/docs/layer.html (added)
+++ incubator/singa/site/trunk/en/docs/layer.html Wed Apr 24 14:57:35 2019
@@ -0,0 +1,1250 @@
+
+
+
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  <title>Layer &mdash; incubator-singa 1.1.0 documentation</title>
+  
+
+  
+  
+  
+  
+
+  
+  <script type="text/javascript" src="../_static/js/modernizr.min.js"></script>
+  
+    
+      <script type="text/javascript" id="documentation_options" 
data-url_root="../" src="../_static/documentation_options.js"></script>
+        <script type="text/javascript" src="../_static/jquery.js"></script>
+        <script type="text/javascript" src="../_static/underscore.js"></script>
+        <script type="text/javascript" src="../_static/doctools.js"></script>
+        <script type="text/javascript" 
src="../_static/language_data.js"></script>
+    
+    <script type="text/javascript" src="../_static/js/theme.js"></script>
+
+    
+
+  
+  <link rel="stylesheet" href="../_static/css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" />
+    <link rel="next" title="FeedForward Net" href="net.html" />
+    <link rel="prev" title="Tensor" href="tensor.html" />
+    <link href="../_static/style.css" rel="stylesheet" type="text/css">
+    <!--link href="../_static/fontawesome-all.min.css" rel="stylesheet" 
type="text/css"-->
+       <link rel="stylesheet" 
href="https://use.fontawesome.com/releases/v5.0.13/css/all.css" 
integrity="sha384-DNOHZ68U8hZfKXOrtjWvjxusGo9WQnrNx2sqG0tfsghAvtVlRW3tvkXWZh58N9jp"
 crossorigin="anonymous">
+    <style>
+       .fa:hover {
+           opacity: 0.7;
+       }
+       .fab:hover {
+           opacity: 0.7;
+       }
+    </style>
+
+</head>
+
+<body class="wy-body-for-nav">
+
+   
+  <div class="wy-grid-for-nav">
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side">
+      <div class="wy-side-scroll">
+        <div class="wy-side-nav-search" >
+          
+
+          
+            <a href="../index.html" class="icon icon-home"> incubator-singa
+          
+
+          
+            
+            <img src="../_static/singa.png" class="logo" alt="Logo"/>
+          
+          </a>
+
+          
+            
+            
+              <div class="version">
+                latest
+              </div>
+            
+          
+
+          
+<div role="search">
+  <form id="rtd-search-form" class="wy-form" action="../search.html" 
method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+    <input type="hidden" name="check_keywords" value="yes" />
+    <input type="hidden" name="area" value="default" />
+  </form>
+</div>
+
+          
+        </div>
+
+        <div class="wy-menu wy-menu-vertical" data-spy="affix" 
role="navigation" aria-label="main navigation">
+          
+            
+            
+              
+            
+            
+              <ul class="current">
+<li class="toctree-l1 current"><a class="reference internal" 
href="index.html">Documentation</a><ul class="current">
+<li class="toctree-l2"><a class="reference internal" 
href="installation.html">Installation</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="software_stack.html">Software Stack</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="device.html">Device</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="tensor.html">Tensor</a></li>
+<li class="toctree-l2 current"><a class="current reference internal" 
href="#">Layer</a><ul>
+<li class="toctree-l3"><a class="reference internal" 
href="#module-singa.layer">Python API</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#cpp-api">CPP 
API</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" 
href="net.html">FeedForward Net</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="initializer.html">Initializer</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="loss.html">Loss</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="metric.html">Metric</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="optimizer.html">Optimizer</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="autograd.html">Autograd in Singa</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="data.html">Data</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="image_tool.html">Image Tool</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="snapshot.html">Snapshot</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="converter.html">Caffe Converter</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="utils.html">Utils</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="model_zoo/index.html">Model Zoo</a></li>
+<li class="toctree-l2"><a class="reference internal" 
href="security.html">Security</a></li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" 
href="../downloads.html">Download SINGA</a></li>
+</ul>
+<p class="caption"><span class="caption-text">Development</span></p>
+<ul>
+<li class="toctree-l1"><a class="reference internal" 
href="../develop/schedule.html">Development Schedule</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../develop/how-contribute.html">How to Contribute to SINGA</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../develop/how-contribute.html#how-to-become-a-singa-committer">How to 
become a SINGA committer</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../develop/contribute-code.html">How to Contribute Code</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../develop/contribute-docs.html">How to Contribute to 
Documentation</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../develop/how-to-release.html">How to prepare a release</a></li>
+</ul>
+<p class="caption"><span class="caption-text">Community</span></p>
+<ul>
+<li class="toctree-l1"><a class="reference internal" 
href="../community/source-repository.html">Source Repository</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../community/mail-lists.html">Project Mailing Lists</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../community/issue-tracking.html">Issue Tracking</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../community/team-list.html">The SINGA Team</a></li>
+</ul>
+
+            
+          
+        </div>
+      </div>
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" aria-label="top navigation">
+        
+          <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+          <a href="../index.html">incubator-singa</a>
+        
+      </nav>
+
+
+      <div class="wy-nav-content">
+        
+        <div class="rst-content">
+        
+          
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+<div role="navigation" aria-label="breadcrumbs navigation">
+
+  <ul class="wy-breadcrumbs">
+    
+      <li><a href="../index.html">Docs</a> &raquo;</li>
+        
+          <li><a href="index.html">Documentation</a> &raquo;</li>
+        
+      <li>Layer</li>
+    
+    
+      <li class="wy-breadcrumbs-aside">
+        
+            
+        
+      </li>
+    
+  </ul>
+
+  
+  <hr/>
+</div>
+          <div role="main" class="document" itemscope="itemscope" 
itemtype="http://schema.org/Article">
+           <div itemprop="articleBody">
+            
+  <div class="section" id="layer">
+<h1>Layer<a class="headerlink" href="#layer" title="Permalink to this 
headline">¶</a></h1>
+<div class="section" id="module-singa.layer">
+<span id="python-api"></span><h2>Python API<a class="headerlink" 
href="#module-singa.layer" title="Permalink to this headline">¶</a></h2>
+<p>Python layers wrap the C++ layers to provide simpler construction APIs.</p>
+<p>Example usages:</p>
+<div class="highlight-default notranslate"><div 
class="highlight"><pre><span></span><span class="kn">from</span> <span 
class="nn">singa</span> <span class="k">import</span> <span 
class="n">layer</span>
+<span class="kn">from</span> <span class="nn">singa</span> <span 
class="k">import</span> <span class="n">tensor</span>
+<span class="kn">from</span> <span class="nn">singa</span> <span 
class="k">import</span> <span class="n">device</span>
+
+<span class="n">layer</span><span class="o">.</span><span 
class="n">engine</span> <span class="o">=</span> <span 
class="s1">&#39;cudnn&#39;</span>  <span class="c1"># to use cudnn layers</span>
+<span class="n">dev</span> <span class="o">=</span> <span 
class="n">device</span><span class="o">.</span><span 
class="n">create_cuda_gpu</span><span class="p">()</span>
+
+<span class="c1"># create a convolution layer</span>
+<span class="n">conv</span> <span class="o">=</span> <span 
class="n">layer</span><span class="o">.</span><span 
class="n">Conv2D</span><span class="p">(</span><span 
class="s1">&#39;conv&#39;</span><span class="p">,</span> <span 
class="mi">32</span><span class="p">,</span> <span class="mi">3</span><span 
class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span 
class="n">pad</span><span class="o">=</span><span class="mi">1</span><span 
class="p">,</span> <span class="n">input_sample_shape</span><span 
class="o">=</span><span class="p">(</span><span class="mi">3</span><span 
class="p">,</span> <span class="mi">32</span><span class="p">,</span> <span 
class="mi">32</span><span class="p">))</span>
+
+<span class="c1"># init param values</span>
+<span class="n">w</span><span class="p">,</span> <span class="n">b</span> 
<span class="o">=</span> <span class="n">conv</span><span 
class="o">.</span><span class="n">param_values</span><span class="p">()</span>
+<span class="n">w</span><span class="o">.</span><span 
class="n">gaussian</span><span class="p">(</span><span class="mi">0</span><span 
class="p">,</span> <span class="mf">0.01</span><span class="p">)</span>
+<span class="n">b</span><span class="o">.</span><span 
class="n">set_value</span><span class="p">(</span><span 
class="mi">0</span><span class="p">)</span>
+<span class="n">conv</span><span class="o">.</span><span 
class="n">to_device</span><span class="p">(</span><span 
class="n">dev</span><span class="p">)</span>  <span class="c1"># move the layer 
data onto a CudaGPU device</span>
+
+<span class="n">x</span> <span class="o">=</span> <span 
class="n">tensor</span><span class="o">.</span><span 
class="n">Tensor</span><span class="p">((</span><span class="mi">3</span><span 
class="p">,</span> <span class="mi">32</span><span class="p">,</span> <span 
class="mi">32</span><span class="p">),</span> <span class="n">dev</span><span 
class="p">)</span>
+<span class="n">x</span><span class="o">.</span><span 
class="n">uniform</span><span class="p">(</span><span class="o">-</span><span 
class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span 
class="p">)</span>
+<span class="n">y</span> <span class="o">=</span> <span 
class="n">conv</span><span class="o">.</span><span class="n">forward</span><span 
class="p">(</span><span class="kc">True</span><span class="p">,</span> <span 
class="n">x</span><span class="p">)</span>
+
+<span class="n">dy</span> <span class="o">=</span> <span 
class="n">tensor</span><span class="o">.</span><span 
class="n">Tensor</span><span class="p">()</span>
+<span class="n">dy</span><span class="o">.</span><span 
class="n">reset_like</span><span class="p">(</span><span 
class="n">y</span><span class="p">)</span>
+<span class="n">dy</span><span class="o">.</span><span 
class="n">set_value</span><span class="p">(</span><span 
class="mf">0.1</span><span class="p">)</span>
+<span class="c1"># dp is a list of tensors for parameter gradients</span>
+<span class="n">dx</span><span class="p">,</span> <span class="n">dp</span> 
<span class="o">=</span> <span class="n">conv</span><span 
class="o">.</span><span class="n">backward</span><span class="p">(</span><span 
class="n">kTrain</span><span class="p">,</span> <span class="n">dy</span><span 
class="p">)</span>
+</pre></div>
+</div>
+<dl class="data">
+<dt id="singa.layer.engine">
+<code class="descclassname">singa.layer.</code><code 
class="descname">engine</code><em class="property"> = 'cudnn'</em><a 
class="headerlink" href="#singa.layer.engine" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>engine is the prefix of layer identifier.</p>
+<p>The value could be one of [<strong>‘cudnn’, ‘singacpp’, 
‘singacuda’, ‘singacl’</strong>], for
+layers implemented using the cudnn library, Cpp, Cuda and OpenCL respectively.
+For example, CudnnConvolution layer is identified by ‘cudnn_convolution’;
+‘singacpp_convolution’ is for Convolution layer;
+Some layers’ implementations use only Tensor functions, therefore they are
+transparent to the underlying devices. For these layers, they would have
+multiple identifiers, e.g., singacpp_dropout, singacuda_dropout and
+singacl_dropout are all for the Dropout layer. In addition, it has an extra
+identifier ‘singa’, i.e. ‘singa_dropout’ also stands for the Dropout 
layer.</p>
+<p>engine is case insensitive. Each python layer would create the correct 
specific
+layer using the engine attribute.</p>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Layer">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Layer</code><span class="sig-paren">(</span><em>name</em>, 
<em>conf=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Layer" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <code class="xref py py-class docutils literal 
notranslate"><span class="pre">object</span></code></p>
+<p>Base Python layer class.</p>
+<dl class="simple">
+<dt>Typically, the life cycle of a layer instance includes:</dt><dd><ol 
class="arabic simple">
+<li><p>construct layer without input_sample_shapes, goto 2;
+construct layer with input_sample_shapes, goto 3;</p></li>
+<li><p>call setup to create the parameters and setup other meta fields</p></li>
+<li><p>call forward or access layer members</p></li>
+<li><p>call backward and get parameters for update</p></li>
+</ol>
+</dd>
+</dl>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>name</strong> (<em>str</em>) – layer 
name</p>
+</dd>
+</dl>
+<dl class="method">
+<dt id="singa.layer.Layer.setup">
+<code class="descname">setup</code><span 
class="sig-paren">(</span><em>in_shapes</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Layer.setup" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Call the C++ setup function to create params and set some meta data.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>in_shapes</strong> – if the layer accepts a 
single input Tensor, in_shapes is
+a single tuple specifying the input Tensor shape; if the layer
+accepts multiple input Tensor (e.g., the concatenation layer),
+in_shapes is a tuple of tuples, each for one input Tensor</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Layer.caffe_layer">
+<code class="descname">caffe_layer</code><span class="sig-paren">(</span><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Layer.caffe_layer" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Create a singa layer based on caffe layer configuration.</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Layer.get_output_sample_shape">
+<code class="descname">get_output_sample_shape</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Layer.get_output_sample_shape" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Called after setup to get the shape of the output sample(s).</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a tuple for a single output Tensor or a list of 
tuples if this layer
+has multiple outputs</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Layer.param_names">
+<code class="descname">param_names</code><span class="sig-paren">(</span><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Layer.param_names" title="Permalink to this 
definition">¶</a></dt>
+<dd><dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a list of strings, one for the name of one parameter 
Tensor</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Layer.param_values">
+<code class="descname">param_values</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Layer.param_values" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Return param value tensors.</p>
+<p>Parameter tensors are not stored as layer members because cpp Tensor
+could be moved onto diff devices due to the change of layer device,
+which would result in inconsistency.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a list of tensors, one for each parameter</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Layer.forward">
+<code class="descname">forward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>x</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Layer.forward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Forward propagate through this layer.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>flag</strong> – True (kTrain) for training (kEval); False for 
evaluating;
+other values for future use.</p></li>
+<li><p><strong>x</strong> (<a class="reference internal" 
href="tensor.html#singa.tensor.Tensor" 
title="singa.tensor.Tensor"><em>Tensor</em></a><em> or 
</em><em>list&lt;Tensor&gt;</em>) – an input tensor if the layer is
+connected from a single layer; a list of tensors if the layer
+is connected from multiple layers.</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p>a tensor if the layer is connected to a single 
layer; a list of
+tensors if the layer is connected to multiple layers;</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Layer.backward">
+<code class="descname">backward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>dy</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Layer.backward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Backward propagate gradients through this layer.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>flag</strong> (<em>int</em>) – for future use.</p></li>
+<li><p><strong>dy</strong> (<a class="reference internal" 
href="tensor.html#singa.tensor.Tensor" 
title="singa.tensor.Tensor"><em>Tensor</em></a><em> or 
</em><em>list&lt;Tensor&gt;</em>) – the gradient tensor(s) y w.r.t the
+objective loss</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p>&lt;dx, &lt;dp1, dp2..&gt;&gt;, dx is a (set of) 
tensor(s) for the gradient of x
+, dpi is the gradient of the i-th parameter</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Layer.to_device">
+<code class="descname">to_device</code><span 
class="sig-paren">(</span><em>device</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Layer.to_device" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Move layer state tensors onto the given device.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>device</strong> – swig converted device, 
created using singa.device</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Layer.as_type">
+<code class="descname">as_type</code><span 
class="sig-paren">(</span><em>dtype</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Layer.as_type" title="Permalink to this 
definition">¶</a></dt>
+<dd></dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Dummy">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Dummy</code><span class="sig-paren">(</span><em>name</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Dummy" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>A dummy layer that does nothing but just forwards/backwards the data
+(the input/output is a single tensor).</p>
+<dl class="method">
+<dt id="singa.layer.Dummy.get_output_sample_shape">
+<code class="descname">get_output_sample_shape</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Dummy.get_output_sample_shape" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Called after setup to get the shape of the output sample(s).</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a tuple for a single output Tensor or a list of 
tuples if this layer
+has multiple outputs</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Dummy.setup">
+<code class="descname">setup</code><span 
class="sig-paren">(</span><em>input_sample_shape</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.Dummy.setup" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Call the C++ setup function to create params and set some meta data.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>in_shapes</strong> – if the layer accepts a 
single input Tensor, in_shapes is
+a single tuple specifying the input Tensor shape; if the layer
+accepts multiple input Tensor (e.g., the concatenation layer),
+in_shapes is a tuple of tuples, each for one input Tensor</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Dummy.forward">
+<code class="descname">forward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>x</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Dummy.forward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Return the input x</p>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Dummy.backward">
+<code class="descname">backward</code><span 
class="sig-paren">(</span><em>falg</em>, <em>dy</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Dummy.backward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Return dy, []</p>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Conv2D">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Conv2D</code><span class="sig-paren">(</span><em>name</em>, 
<em>nb_kernels</em>, <em>kernel=3</em>, <em>stride=1</em>, 
<em>border_mode='same'</em>, <em>cudnn_prefer='fastest'</em>, 
<em>workspace_byte_limit=1024</em>, <em>data_format='NCHW'</em>, 
<em>use_bias=True</em>, <em>W_specs=None</em>, <em>b_specs=None</em>, 
<em>pad=None</em>, <em>input_sample_shape=None</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.Conv2D" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Construct a layer for 2D convolution.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>nb_kernels</strong> (<em>int</em>) – num of the channels 
(kernels) of the input Tensor</p></li>
+<li><p><strong>kernel</strong> – an integer or a pair of integers for kernel 
height and width</p></li>
+<li><p><strong>stride</strong> – an integer or a pair of integers for stride 
height and width</p></li>
+<li><p><strong>border_mode</strong> (<em>string</em>) – padding mode, case 
in-sensitive,
+‘valid’ -&gt; padding is 0 for height and width
+‘same’ -&gt; padding is half of the kernel (floor), the kernel must be
+odd number.</p></li>
+<li><p><strong>cudnn_prefer</strong> (<em>string</em>) – the preferred 
algorithm for cudnn convolution
+which could be ‘fastest’, ‘autotune’, ‘limited_workspace’ and
+‘no_workspace’</p></li>
+<li><p><strong>workspace_byte_limit</strong> (<em>int</em>) – max workspace 
size in MB (default is 512MB)</p></li>
+<li><p><strong>data_format</strong> (<em>string</em>) – either ‘NCHW’ or 
‘NHWC’</p></li>
+<li><p><strong>use_bias</strong> (<em>bool</em>) – True or False</p></li>
+<li><p><strong>pad</strong> – an integer or a pair of integers for padding 
height and width</p></li>
+<li><p><strong>W_specs</strong> (<em>dict</em>) – used to specify the weight 
matrix specs, fields
+include,
+‘name’ for parameter name
+‘lr_mult’ for learning rate multiplier
+‘decay_mult’ for weight decay multiplier
+‘init’ for init method, which could be ‘gaussian’, ‘uniform’,
+‘xavier’ and ‘’
+‘std’, ‘mean’, ‘high’, ‘low’ for corresponding init methods
+TODO(wangwei) ‘clamp’ for gradient constraint, value is scalar
+‘regularizer’ for regularization, currently support ‘l2’</p></li>
+<li><p><strong>b_specs</strong> (<em>dict</em>) – hyper-parameters for bias 
vector, similar as W_specs</p></li>
+<li><p><strong>name</strong> (<em>string</em>) – layer name.</p></li>
+<li><p><strong>input_sample_shape</strong> – 3d tuple for the shape of the 
input Tensor
+without the batchsize, e.g., (channel, height, width) or
+(height, width, channel)</p></li>
+</ul>
+</dd>
+</dl>
+<dl class="method">
+<dt id="singa.layer.Conv2D.setup">
+<code class="descname">setup</code><span 
class="sig-paren">(</span><em>in_shape</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Conv2D.setup" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Set up the kernel, stride and padding; then call the C++ setup
+function to create params and set some meta data.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>is a tuple of int for the input sample 
shape</strong> (<em>in_shapes</em>) – </p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Conv1D">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Conv1D</code><span class="sig-paren">(</span><em>name</em>, 
<em>nb_kernels</em>, <em>kernel=3</em>, <em>stride=1</em>, 
<em>border_mode='same'</em>, <em>cudnn_prefer='fastest'</em>, 
<em>workspace_byte_limit=1024</em>, <em>use_bias=True</em>, 
<em>W_specs={'init': 'Xavier'}</em>, <em>b_specs={'init': 'Constant'</em>, 
<em>'value': 0}</em>, <em>pad=None</em>, <em>input_sample_shape=None</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.Conv1D" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Conv2D" 
title="singa.layer.Conv2D"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Conv2D</span></code></a></p>
+<p>Construct a layer for 1D convolution.</p>
+<p>Most of the args are the same as those for Conv2D except the kernel,
+stride, pad, which is a scalar instead of a tuple.
+input_sample_shape is a tuple with a single value for the input feature
+length</p>
+<dl class="method">
+<dt id="singa.layer.Conv1D.get_output_sample_shape">
+<code class="descname">get_output_sample_shape</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Conv1D.get_output_sample_shape" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Called after setup to get the shape of the output sample(s).</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a tuple for a single output Tensor or a list of 
tuples if this layer
+has multiple outputs</p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Pooling2D">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Pooling2D</code><span class="sig-paren">(</span><em>name</em>, 
<em>mode</em>, <em>kernel=3</em>, <em>stride=2</em>, 
<em>border_mode='same'</em>, <em>pad=None</em>, <em>data_format='NCHW'</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Pooling2D" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>2D pooling layer providing max/avg pooling.</p>
+<p>All args are the same as those for Conv2D, except the following one</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>mode</strong> – pooling type, 
model_pb2.PoolingConf.MAX or
+model_pb2.PoolingConf.AVE</p>
+</dd>
+</dl>
+<dl class="method">
+<dt id="singa.layer.Pooling2D.setup">
+<code class="descname">setup</code><span 
class="sig-paren">(</span><em>in_shape</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Pooling2D.setup" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Set up the kernel, stride and padding; then call the C++ setup
+function to create params and set some meta data.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>is a tuple of int for the input sample 
shape</strong> (<em>in_shapes</em>) – </p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.MaxPooling2D">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">MaxPooling2D</code><span 
class="sig-paren">(</span><em>name</em>, <em>kernel=3</em>, <em>stride=2</em>, 
<em>border_mode='same'</em>, <em>pad=None</em>, <em>data_format='NCHW'</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.MaxPooling2D" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Pooling2D" 
title="singa.layer.Pooling2D"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Pooling2D</span></code></a></p>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.AvgPooling2D">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">AvgPooling2D</code><span 
class="sig-paren">(</span><em>name</em>, <em>kernel=3</em>, <em>stride=2</em>, 
<em>border_mode='same'</em>, <em>pad=None</em>, <em>data_format='NCHW'</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.AvgPooling2D" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Pooling2D" 
title="singa.layer.Pooling2D"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Pooling2D</span></code></a></p>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.MaxPooling1D">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">MaxPooling1D</code><span 
class="sig-paren">(</span><em>name</em>, <em>kernel=3</em>, <em>stride=2</em>, 
<em>border_mode='same'</em>, <em>pad=None</em>, <em>data_format='NCHW'</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.MaxPooling1D" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.MaxPooling2D" 
title="singa.layer.MaxPooling2D"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.MaxPooling2D</span></code></a></p>
+<dl class="method">
+<dt id="singa.layer.MaxPooling1D.get_output_sample_shape">
+<code class="descname">get_output_sample_shape</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.MaxPooling1D.get_output_sample_shape" title="Permalink to 
this definition">¶</a></dt>
+<dd><p>Called after setup to get the shape of the output sample(s).</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a tuple for a single output Tensor or a list of 
tuples if this layer
+has multiple outputs</p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.AvgPooling1D">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">AvgPooling1D</code><span 
class="sig-paren">(</span><em>name</em>, <em>kernel=3</em>, <em>stride=2</em>, 
<em>border_mode='same'</em>, <em>pad=None</em>, <em>data_format='NCHW'</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.AvgPooling1D" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.AvgPooling2D" 
title="singa.layer.AvgPooling2D"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.AvgPooling2D</span></code></a></p>
+<dl class="method">
+<dt id="singa.layer.AvgPooling1D.get_output_sample_shape">
+<code class="descname">get_output_sample_shape</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.AvgPooling1D.get_output_sample_shape" title="Permalink to 
this definition">¶</a></dt>
+<dd><p>Called after setup to get the shape of the output sample(s).</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a tuple for a single output Tensor or a list of 
tuples if this layer
+has multiple outputs</p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.BatchNormalization">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">BatchNormalization</code><span 
class="sig-paren">(</span><em>name</em>, <em>momentum=0.9</em>, 
<em>beta_specs=None</em>, <em>gamma_specs=None</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.BatchNormalization" title="Permalink to 
this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Batch-normalization.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>momentum</strong> (<em>float</em>) – for running average mean 
and variance.</p></li>
+<li><p><strong>beta_specs</strong> (<em>dict</em>) – dictionary includes the 
fields for the beta
+param:
+‘name’ for parameter name
+‘lr_mult’ for learning rate multiplier
+‘decay_mult’ for weight decay multiplier
+‘init’ for init method, which could be ‘gaussian’, ‘uniform’,
+‘xavier’ and ‘’
+‘std’, ‘mean’, ‘high’, ‘low’ for corresponding init methods
+‘clamp’ for gradient constraint, value is scalar
+‘regularizer’ for regularization, currently support ‘l2’</p></li>
+<li><p><strong>gamma_specs</strong> (<em>dict</em>) – similar to beta_specs, 
but for the gamma param.</p></li>
+<li><p><strong>name</strong> (<em>string</em>) – layer name</p></li>
+<li><p><strong>input_sample_shape</strong> (<em>tuple</em>) – with at least 
one integer</p></li>
+</ul>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.L2Norm">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">L2Norm</code><span class="sig-paren">(</span><em>name</em>, 
<em>input_sample_shape</em>, <em>epsilon=1e-08</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.L2Norm" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Normalize each sample to have L2 norm = 1</p>
+<dl class="method">
+<dt id="singa.layer.L2Norm.get_output_sample_shape">
+<code class="descname">get_output_sample_shape</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.L2Norm.get_output_sample_shape" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Called after setup to get the shape of the output sample(s).</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a tuple for a single output Tensor or a list of 
tuples if this layer
+has multiple outputs</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.L2Norm.forward">
+<code class="descname">forward</code><span 
class="sig-paren">(</span><em>is_train</em>, <em>x</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.L2Norm.forward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Forward propagate through this layer.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>flag</strong> – True (kTrain) for training; False (kEval) for 
evaluating;
+other values for future use.</p></li>
+<li><p><strong>x</strong> (<a class="reference internal" 
href="tensor.html#singa.tensor.Tensor" 
title="singa.tensor.Tensor"><em>Tensor</em></a><em> or 
</em><em>list&lt;Tensor&gt;</em>) – an input tensor if the layer is
+connected from a single layer; a list of tensors if the layer
+is connected from multiple layers.</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p>a tensor if the layer is connected to a single 
layer; a list of
+tensors if the layer is connected to multiple layers;</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.L2Norm.backward">
+<code class="descname">backward</code><span 
class="sig-paren">(</span><em>is_train</em>, <em>dy</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.L2Norm.backward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Backward propagate gradients through this layer.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>flag</strong> (<em>int</em>) – for future use.</p></li>
+<li><p><strong>dy</strong> (<a class="reference internal" 
href="tensor.html#singa.tensor.Tensor" 
title="singa.tensor.Tensor"><em>Tensor</em></a><em> or 
</em><em>list&lt;Tensor&gt;</em>) – the gradient tensor(s) y w.r.t the
+objective loss</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p>&lt;dx, &lt;dp1, dp2..&gt;&gt;, dx is a (set of) 
tensor(s) for the gradient of x
+, dpi is the gradient of the i-th parameter</p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.LRN">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code class="descname">LRN</code><span 
class="sig-paren">(</span><em>name</em>, <em>size=5</em>, <em>alpha=1</em>, 
<em>beta=0.75</em>, <em>mode='cross_channel'</em>, <em>k=1</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.LRN" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Local response normalization.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>size</strong> (<em>int</em>) – # of channels to be crossed for
+normalization.</p></li>
+<li><p><strong>mode</strong> (<em>string</em>) – ‘cross_channel’</p></li>
+<li><p><strong>input_sample_shape</strong> (<em>tuple</em>) – 3d tuple, 
(channel, height, width)</p></li>
+</ul>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Dense">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Dense</code><span class="sig-paren">(</span><em>name</em>, 
<em>num_output</em>, <em>use_bias=True</em>, <em>W_specs=None</em>, 
<em>b_specs=None</em>, <em>W_transpose=False</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Dense" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Apply linear/affine transformation, also called inner-product or
+fully connected layer.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>num_output</strong> (<em>int</em>) – output feature 
length.</p></li>
+<li><p><strong>use_bias</strong> (<em>bool</em>) – add a bias vector or not 
to the transformed feature</p></li>
+<li><p><strong>W_specs</strong> (<em>dict</em>) – specs for the weight matrix
+‘name’ for parameter name
+‘lr_mult’ for learning rate multiplier
+‘decay_mult’ for weight decay multiplier
+‘init’ for init method, which could be ‘gaussian’, ‘uniform’,
+‘xavier’ and ‘’
+‘std’, ‘mean’, ‘high’, ‘low’ for corresponding init methods
+‘clamp’ for gradient constraint, value is scalar
+‘regularizer’ for regularization, currently support ‘l2’</p></li>
+<li><p><strong>b_specs</strong> (<em>dict</em>) – specs for the bias vector, 
same fields as W_specs.</p></li>
+<li><p><strong>W_transpose</strong> (<em>bool</em>) – if true, 
output=x*W.T+b;</p></li>
+<li><p><strong>input_sample_shape</strong> (<em>tuple</em>) – input feature 
length</p></li>
+</ul>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Dropout">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Dropout</code><span class="sig-paren">(</span><em>name</em>, 
<em>p=0.5</em>, <em>input_sample_shape=None</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.Dropout" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Dropout layer.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>p</strong> (<em>float</em>) – probability for dropping out 
the element, i.e., set to 0</p></li>
+<li><p><strong>name</strong> (<em>string</em>) – layer name</p></li>
+</ul>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Activation">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Activation</code><span 
class="sig-paren">(</span><em>name</em>, <em>mode='relu'</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Activation" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Activation layers.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>name</strong> (<em>string</em>) – layer name</p></li>
+<li><p><strong>mode</strong> (<em>string</em>) – ‘relu’, ‘sigmoid’, 
or ‘tanh’</p></li>
+<li><p><strong>input_sample_shape</strong> (<em>tuple</em>) – shape of a 
single sample</p></li>
+</ul>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Softmax">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Softmax</code><span class="sig-paren">(</span><em>name</em>, 
<em>axis=1</em>, <em>input_sample_shape=None</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.Softmax" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Apply softmax.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>axis</strong> (<em>int</em>) – reshape the input as a matrix 
with the dimension
+[0,axis) as the row, the [axis, -1) as the column.</p></li>
+<li><p><strong>input_sample_shape</strong> (<em>tuple</em>) – shape of a 
single sample</p></li>
+</ul>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Flatten">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Flatten</code><span class="sig-paren">(</span><em>name</em>, 
<em>axis=1</em>, <em>input_sample_shape=None</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.Flatten" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Reshape the input tensor into a matrix.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>axis</strong> (<em>int</em>) – reshape the input as a matrix 
with the dimension
+[0,axis) as the row, the [axis, -1) as the column.</p></li>
+<li><p><strong>input_sample_shape</strong> (<em>tuple</em>) – shape for a 
single sample</p></li>
+</ul>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Merge">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Merge</code><span class="sig-paren">(</span><em>name</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Merge" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Sum all input tensors.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>input_sample_shape</strong> – sample shape 
of the input. The sample shape of all
+inputs should be the same.</p>
+</dd>
+</dl>
+<dl class="method">
+<dt id="singa.layer.Merge.setup">
+<code class="descname">setup</code><span 
class="sig-paren">(</span><em>in_shape</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Merge.setup" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Call the C++ setup function to create params and set some meta data.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>in_shapes</strong> – if the layer accepts a 
single input Tensor, in_shapes is
+a single tuple specifying the input Tensor shape; if the layer
+accepts multiple input Tensor (e.g., the concatenation layer),
+in_shapes is a tuple of tuples, each for one input Tensor</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Merge.get_output_sample_shape">
+<code class="descname">get_output_sample_shape</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Merge.get_output_sample_shape" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Called after setup to get the shape of the output sample(s).</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a tuple for a single output Tensor or a list of 
tuples if this layer
+has multiple outputs</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Merge.forward">
+<code class="descname">forward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>inputs</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Merge.forward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Merge all input tensors by summation.</p>
+<p>TODO(wangwei) do element-wise merge operations, e.g., avg, count
+:param flag: not used.
+:param inputs: a list of tensors
+:type inputs: list</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>A single tensor as the sum of all input tensors</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Merge.backward">
+<code class="descname">backward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>grad</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Merge.backward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Replicate the grad for each input source layer.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>grad</strong> (<a class="reference internal" 
href="tensor.html#singa.tensor.Tensor" 
title="singa.tensor.Tensor"><em>Tensor</em></a>) – </p>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p>A list of replicated grad, one per source layer</p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Split">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Split</code><span class="sig-paren">(</span><em>name</em>, 
<em>num_output</em>, <em>input_sample_shape=None</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.Split" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Replicate the input tensor.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>num_output</strong> (<em>int</em>) – number of output tensors 
to generate.</p></li>
+<li><p><strong>input_sample_shape</strong> – includes a single integer for 
the input sample
+feature size.</p></li>
+</ul>
+</dd>
+</dl>
+<dl class="method">
+<dt id="singa.layer.Split.setup">
+<code class="descname">setup</code><span 
class="sig-paren">(</span><em>in_shape</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.Split.setup" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Call the C++ setup function to create params and set some meta data.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>in_shapes</strong> – if the layer accepts a 
single input Tensor, in_shapes is
+a single tuple specifying the input Tensor shape; if the layer
+accepts multiple input Tensor (e.g., the concatenation layer),
+in_shapes is a tuple of tuples, each for one input Tensor</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Split.get_output_sample_shape">
+<code class="descname">get_output_sample_shape</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Split.get_output_sample_shape" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Called after setup to get the shape of the output sample(s).</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a tuple for a single output Tensor or a list of 
tuples if this layer
+has multiple outputs</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Split.forward">
+<code class="descname">forward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>input</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Split.forward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Replicate the input tensor into multiple tensors.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>flag</strong> – not used</p></li>
+<li><p><strong>input</strong> – a single input tensor</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p>a list of output tensors (each one is a copy of the 
input)</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Split.backward">
+<code class="descname">backward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>grads</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Split.backward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Sum all grad tensors to generate a single output tensor.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><p><strong>grads</strong> (<em>list of Tensor</em>) – 
</p>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p>a single tensor as the sum of all grads</p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Concat">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Concat</code><span class="sig-paren">(</span><em>name</em>, 
<em>axis</em>, <em>input_sample_shapes=None</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.Concat" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Concatenate tensors vertically (axis = 0) or horizontally (axis = 1).</p>
+<p>Currently, only support tensors with 2 dimensions.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>axis</strong> (<em>int</em>) – 0 for concat row; 1 for concat 
columns;</p></li>
+<li><p><strong>input_sample_shapes</strong> – a list of sample shape tuples, 
one per input tensor</p></li>
+</ul>
+</dd>
+</dl>
+<dl class="method">
+<dt id="singa.layer.Concat.forward">
+<code class="descname">forward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>inputs</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Concat.forward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Concatenate all input tensors.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>flag</strong> – same as Layer::forward()</p></li>
+<li><p><strong>input</strong> – a list of tensors</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p>a single concatenated tensor</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Concat.backward">
+<code class="descname">backward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>dy</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Concat.backward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Backward propagate gradients through this layer.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>flag</strong> – same as Layer::backward()</p></li>
+<li><p><strong>dy</strong> (<a class="reference internal" 
href="tensor.html#singa.tensor.Tensor" 
title="singa.tensor.Tensor"><em>Tensor</em></a>) – the gradient tensors of y 
w.r.t objective loss</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p><dl class="simple">
+<dt>&lt;dx, []&gt;, dx is a list tensors for the gradient of the inputs; 
[]</dt><dd><p>is an empty list.</p>
+</dd>
+</dl>
+</p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.Slice">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">Slice</code><span class="sig-paren">(</span><em>name</em>, 
<em>axis</em>, <em>slice_point</em>, <em>input_sample_shape=None</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.Slice" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Slice the input tensor into multiple sub-tensors vertially (axis=0) or
+horizontally (axis=1).</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>axis</strong> (<em>int</em>) – 0 for slice rows; 1 for slice 
columns;</p></li>
+<li><p><strong>slice_point</strong> (<em>list</em>) – positions along the 
axis to do slice; there are n-1
+points for n sub-tensors;</p></li>
+<li><p><strong>input_sample_shape</strong> – input tensor sample 
shape</p></li>
+</ul>
+</dd>
+</dl>
+<dl class="method">
+<dt id="singa.layer.Slice.get_output_sample_shape">
+<code class="descname">get_output_sample_shape</code><span 
class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Slice.get_output_sample_shape" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Called after setup to get the shape of the output sample(s).</p>
+<dl class="field-list simple">
+<dt class="field-odd">Returns</dt>
+<dd class="field-odd"><p>a tuple for a single output Tensor or a list of 
tuples if this layer
+has multiple outputs</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Slice.forward">
+<code class="descname">forward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>x</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Slice.forward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Slice the input tensor on the given axis.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>flag</strong> – same as Layer::forward()</p></li>
+<li><p><strong>x</strong> – a single input tensor</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p>a list of output tensors</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.Slice.backward">
+<code class="descname">backward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>grads</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.Slice.backward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Concatenate all grad tensors to generate a single output tensor</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>flag</strong> – same as Layer::backward()</p></li>
+<li><p><strong>grads</strong> – a list of tensors, one for the gradient of 
one sliced tensor</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p><dl class="simple">
+<dt>a single tensor for the gradient of the original user, and an 
empty</dt><dd><p>list.</p>
+</dd>
+</dl>
+</p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.RNN">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code class="descname">RNN</code><span 
class="sig-paren">(</span><em>name</em>, <em>hidden_size</em>, 
<em>rnn_mode='lstm'</em>, <em>dropout=0.0</em>, <em>num_stacks=1</em>, 
<em>input_mode='linear'</em>, <em>bidirectional=False</em>, 
<em>param_specs=None</em>, <em>input_sample_shape=None</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.RNN" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.Layer" 
title="singa.layer.Layer"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.Layer</span></code></a></p>
+<p>Recurrent layer with 4 types of units, namely lstm, gru, tanh and relu.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>hidden_size</strong> – hidden feature size, the same for all 
stacks of layers.</p></li>
+<li><p><strong>rnn_mode</strong> – decides the rnn unit, which could be one 
of ‘lstm’, ‘gru’,
+‘tanh’ and ‘relu’, refer to cudnn manual for each mode.</p></li>
+<li><p><strong>num_stacks</strong> – num of stacks of rnn layers. It is 
different from the
+unrolling sequence length.</p></li>
+<li><p><strong>input_mode</strong> – ‘linear’ convert the input feature 
x by a linear
+transformation to get a feature vector of size hidden_size;
+‘skip’ does nothing but requires the input feature size equals
+hidden_size</p></li>
+<li><p><strong>bidirection</strong> – True for bidirectional RNN</p></li>
+<li><p><strong>param_specs</strong> – config for initializing the RNN 
parameters.</p></li>
+<li><p><strong>input_sample_shape</strong> – includes a single integer for 
the input sample
+feature size.</p></li>
+</ul>
+</dd>
+</dl>
+<dl class="method">
+<dt id="singa.layer.RNN.forward">
+<code class="descname">forward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>inputs</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.RNN.forward" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Forward inputs through the RNN.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>flag</strong> – True(kTrain) for training; False(kEval) for 
evaluation;
+other values for future use.</p></li>
+<li><p><strong>&lt;x1</strong><strong>, 
</strong><strong>x2</strong><strong>,</strong><strong>..xn</strong><strong>, 
</strong><strong>hx</strong><strong>, </strong><strong>cx&gt;</strong><strong>, 
</strong><strong>where xi is the input tensor for the</strong> 
(<em>inputs</em><em>,</em>) – i-th position, its shape is (batch_size, 
input_feature_length);
+the batch_size of xi must &gt;= that of xi+1; hx is the initial
+hidden state of shape (num_stacks * bidirection?2:1, batch_size,
+hidden_size). cx is the initial cell state tensor of the same
+shape as hy. cx is valid only for lstm. For other RNNs there is
+no cx. Both hx and cx could be dummy tensors without shape and
+data.</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p><dl class="simple">
+<dt>&lt;y1, y2, … yn, hy, cy&gt;, where yi is the output tensor for the 
i-th</dt><dd><p>position, its shape is (batch_size,
+hidden_size * bidirection?2:1). hy is the final hidden state
+tensor. cy is the final cell state tensor. cy is only used for
+lstm.</p>
+</dd>
+</dl>
+</p>
+</dd>
+</dl>
+</dd></dl>
+
+<dl class="method">
+<dt id="singa.layer.RNN.backward">
+<code class="descname">backward</code><span 
class="sig-paren">(</span><em>flag</em>, <em>grad</em><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.RNN.backward" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Backward gradients through the RNN.</p>
+<dl class="field-list simple">
+<dt class="field-odd">Parameters</dt>
+<dd class="field-odd"><ul class="simple">
+<li><p><strong>for future use.</strong> (<em>flag</em><em>,</em>) – </p></li>
+<li><p><strong>&lt;dy1</strong><strong>, 
</strong><strong>dy2</strong><strong>,</strong><strong>..dyn</strong><strong>, 
</strong><strong>dhy</strong><strong>, 
</strong><strong>dcy&gt;</strong><strong>, </strong><strong>where dyi is the 
gradient for the</strong> (<em>grad</em><em>,</em>) – </p></li>
+<li><p><strong>output</strong><strong>, </strong><strong>its shape 
is</strong><strong> (</strong><strong>batch_size</strong><strong>, 
</strong><strong>hidden_size*bidirection?2</strong> (<em>i-th</em>) – 1);
+dhy is the gradient for the final hidden state, its shape is
+(num_stacks * bidirection?2:1, batch_size,
+hidden_size). dcy is the gradient for the final cell state.
+dcy is valid only for lstm. For other RNNs there is
+no dcy. Both dhy and dcy could be dummy tensors without shape and
+data.</p></li>
+</ul>
+</dd>
+<dt class="field-even">Returns</dt>
+<dd class="field-even"><p><dl class="simple">
+<dt>&lt;dx1, dx2, … dxn, dhx, dcx&gt;, where dxi is the gradient tensor 
for</dt><dd><p>the i-th input, its shape is (batch_size,
+input_feature_length). dhx is the gradient for the initial
+hidden state. dcx is the gradient for the initial cell state,
+which is valid only for lstm.</p>
+</dd>
+</dl>
+</p>
+</dd>
+</dl>
+</dd></dl>
+
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.LSTM">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code 
class="descname">LSTM</code><span class="sig-paren">(</span><em>name</em>, 
<em>hidden_size</em>, <em>dropout=0.0</em>, <em>num_stacks=1</em>, 
<em>input_mode='linear'</em>, <em>bidirectional=False</em>, 
<em>param_specs=None</em>, <em>input_sample_shape=None</em><span 
class="sig-paren">)</span><a class="headerlink" href="#singa.layer.LSTM" 
title="Permalink to this definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.RNN" 
title="singa.layer.RNN"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.RNN</span></code></a></p>
+</dd></dl>
+
+<dl class="class">
+<dt id="singa.layer.GRU">
+<em class="property">class </em><code 
class="descclassname">singa.layer.</code><code class="descname">GRU</code><span 
class="sig-paren">(</span><em>name</em>, <em>hidden_size</em>, 
<em>dropout=0.0</em>, <em>num_stacks=1</em>, <em>input_mode='linear'</em>, 
<em>bidirectional=False</em>, <em>param_specs=None</em>, 
<em>input_sample_shape=None</em><span class="sig-paren">)</span><a 
class="headerlink" href="#singa.layer.GRU" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Bases: <a class="reference internal" href="#singa.layer.RNN" 
title="singa.layer.RNN"><code class="xref py py-class docutils literal 
notranslate"><span class="pre">singa.layer.RNN</span></code></a></p>
+</dd></dl>
+
+<dl class="function">
+<dt id="singa.layer.get_layer_list">
+<code class="descclassname">singa.layer.</code><code 
class="descname">get_layer_list</code><span class="sig-paren">(</span><span 
class="sig-paren">)</span><a class="headerlink" 
href="#singa.layer.get_layer_list" title="Permalink to this 
definition">¶</a></dt>
+<dd><p>Return a list of strings which includes the identifiers (tags) of all
+supported layers.</p>
+</dd></dl>
+
+</div>
+<div class="section" id="cpp-api">
+<h2>CPP API<a class="headerlink" href="#cpp-api" title="Permalink to this 
headline">¶</a></h2>
+</div>
+</div>
+
+
+           </div>
+           
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer 
navigation">
+      
+        <a href="net.html" class="btn btn-neutral float-right" 
title="FeedForward Net" accesskey="n" rel="next">Next <span class="fa 
fa-arrow-circle-right"></span></a>
+      
+      
+        <a href="tensor.html" class="btn btn-neutral float-left" 
title="Tensor" accesskey="p" rel="prev"><span class="fa 
fa-arrow-circle-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <p>
+        &copy; Copyright 2019 The Apache Software Foundation. All rights 
reserved. Apache SINGA, Apache, the Apache feather logo, and the Apache SINGA 
project logos are trademarks of The Apache Software Foundation. All other marks 
mentioned may be trademarks or registered trademarks of their respective owners.
+
+    </p>
+  </div>
+  Built with <a href="http://sphinx-doc.org/";>Sphinx</a> using a <a 
href="https://github.com/rtfd/sphinx_rtd_theme";>theme</a> provided by <a 
href="https://readthedocs.org";>Read the Docs</a>. 
+
+</footer>
+
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+  
+
+
+  <script type="text/javascript">
+      jQuery(function () {
+          SphinxRtdTheme.Navigation.enable(true);
+      });
+  </script>
+
+  
+  
+    
+  
+
+<div class="rst-versions" data-toggle="rst-versions" role="note" 
aria-label="versions">
+  <span class="rst-current-version" data-toggle="rst-current-version">
+    <span class="fa fa-book"> incubator-singa </span>
+    v: latest
+    <span class="fa fa-caret-down"></span>
+  </span>
+  <div class="rst-other-versions">
+      <dl>
+          <dt>Languages</dt>
+          <dd><a href="../../en/index.html">English</a></dd>
+          <dd><a href="../../zh/index.html">中文</a></dd>
+      </dl>
+      <dl>
+          <dt>Versions</dt>
+          <dd><a href="http://singa.apache.org/v0.3.0/";>0.3</a></dd>
+          <dd><a href="http://singa.apache.org/v1.1.0/";>1.1</a></dd>
+      </dl>
+
+  </div>
+  <a href="http://incubator.apache.org/";> <img src= "../_static/apache.png" 
style="background-color:white;"> </a>
+
+  <a href="https://github.com/apache/incubator-singa"; class="fa fa-github" 
style="padding: 10px; font-size: 20px; width: 30px; text-align: center; 
text-decoration: none; margin: 5px 2px;"></a>
+  <a 
href="https://aws.amazon.com/marketplace/seller-profile?id=5bcac385-12c4-4802-aec7-351e09b77b4c";
 class="fab fa-aws" style="padding: 10px; font-size: 20px; width: 30px; 
text-align: center; text-decoration: none; margin: 5px 2px;"></a>
+  <a href="https://hub.docker.com/r/apache/singa/"; class="fab fa-docker" 
style="padding: 10px; font-size: 20px; width: 30px; text-align: center; 
text-decoration: none; margin: 5px 2px;"></a> 
+  <a href="https://www.linkedin.com/groups/13550034"; class="fa fa-linkedin" 
style="padding: 10px; font-size: 20px; width: 30px; text-align: center; 
text-decoration: none; margin: 5px 2px;"></a>
+  <a href="https://twitter.com/ApacheSinga"; class="fa fa-twitter" 
style="padding: 10px; font-size: 20px; width: 30px; text-align: center; 
text-decoration: none; margin: 5px 2px;"></a>
+  <a href="https://www.facebook.com/Apache-SINGA-347284219056544/"; class="fa 
fa-facebook" style="padding: 10px; font-size: 20px; width: 30px; text-align: 
center; text-decoration: none; margin: 5px 2px;"></a>
+  <a href="https://www.researchgate.net/project/Apache-SINGA"; class="fab 
fa-researchgate" style="padding: 10px; font-size: 20px; width: 30px; 
text-align: center; text-decoration: none; margin: 5px 2px;"></a>
+
+</div>
+
+ <a href="https://github.com/apache/incubator-singa";>
+    <img style="position: absolute; top: 0; right: 0; border: 0; z-index: 
10000;"
+        
src="https://s3.amazonaws.com/github/ribbons/forkme_right_orange_ff7600.png";
+        alt="Fork me on GitHub">
+</a>
+
+ 
+
+
+</body>
+</html>
\ No newline at end of file


Reply via email to