This is an automated email from the ASF dual-hosted git repository.

xidulu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git


The following commit(s) were added to refs/heads/master by this push:
     new 7dcfedc  Update qr backward documentation (#19243)
7dcfedc is described below

commit 7dcfedca704f39b4b9b7497dabf3fea47ad40df4
Author: Denisa Roberts <[email protected]>
AuthorDate: Tue Sep 29 00:37:10 2020 -0400

    Update qr backward documentation (#19243)
---
 python/mxnet/numpy/linalg.py          | 5 +++++
 src/operator/numpy/linalg/np_qr-inl.h | 3 ++-
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/python/mxnet/numpy/linalg.py b/python/mxnet/numpy/linalg.py
index d2756d5..975b889 100644
--- a/python/mxnet/numpy/linalg.py
+++ b/python/mxnet/numpy/linalg.py
@@ -428,6 +428,11 @@ def qr(a, mode='reduced'):
     MXNetError
         If factoring fails.
 
+    Notes
+    -----
+    Currently, the gradient for the QR factorization is well-defined only
+    when the first K = min(M, N) columns of the input matrix are linearly
+    independent.
+
     Examples
     --------
     >>> from mxnet import np
diff --git a/src/operator/numpy/linalg/np_qr-inl.h b/src/operator/numpy/linalg/np_qr-inl.h
index c57e2d6..19df4bb 100644
--- a/src/operator/numpy/linalg/np_qr-inl.h
+++ b/src/operator/numpy/linalg/np_qr-inl.h
@@ -518,7 +518,8 @@ struct QrBackHelper_G2 {
   }
 };
 
-
+// QR backward methodology is explained in detail in
+// https://arxiv.org/abs/2009.10071
 struct qr_backward {
   template<typename xpu, typename DType>
   static void op(const Tensor<xpu, 3, DType>& dA,

Reply via email to