piiswrong commented on a change in pull request #7304: gluon bce & ctc losses
URL: https://github.com/apache/incubator-mxnet/pull/7304#discussion_r131713817
 
 

 ##########
 File path: python/mxnet/gluon/loss.py
 ##########
 @@ -239,3 +281,59 @@ def hybrid_forward(self, F, output, label, 
sample_weight=None):
         loss = label * (F.log(label+1e-8) - output)
         loss = _apply_weighting(F, loss, self._weight, sample_weight)
         return F.mean(loss, axis=self._batch_axis, exclude=True)
+
+class CTCLoss(Loss):
+    r"""Connectionist Temporal Classification Loss.
+
+    See `"Connectionist Temporal Classification: Labelling Unsegmented
+    Sequence Data with Recurrent Neural Networks"
+    <http://www.cs.toronto.edu/~graves/icml_2006.pdf>`_ paper for more 
information.
+
+    The prediction output should be an activation vector without softmax, with 
shape
+    according to the output_layout:
+    **TNC**: *(sequence_length, batch_size, alphabet_size + 1)*
+    **NTC**: *(batch_size, sequence_length, alphabet_size + 1)*
+    **out**: *(batch_size)*.
+
+    ``label`` is a tensor of integers between 1 and *alphabet_size*, with 
shape according
+    to the batch_axis:
+    **batch_axis=0**: *(batch_size, label_sequence_length)*
+    **batch_axis=1**: *(label_sequence_length, batch_size)*
+
+    If a sequence of labels is shorter than *label_sequence_length*, use the 
special
+    padding character 0 at the end of the sequence to conform it to the correct
+    length. For example, if *label_sequence_length* = 4, and one has two 
sequences
+    of labels [2, 1] and [3, 2, 2], the resulting ``label`` tensor should be
+    padded to be::
+
+      [[2, 1, 0, 0], [3, 2, 2, 0]]
+
+
+    Parameters
+    ----------
+    output_layout : str, default 'NTC'
+        Layout of the output sequence activation vector.
+    weight : float or None
+        Global scalar weight for loss.
+    sample_weight : Symbol or None
+        Per sample weighting. Must be broadcastable to
+        the same shape as loss. For example, if loss has
+        shape (64, 10) and you want to weight each sample
+        in the batch, `sample_weight` should have shape (64, 1).
+    batch_axis : int, default 0
+        The axis in label that represents mini-batch.
+    """
+    def __init__(self, output_layout='NTC', weight=None, batch_axis=0, 
**kwargs):
 
 Review comment:
   output_layout & label_layout
 
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to