andreas pushed a commit to branch python-team
in repository guix.

commit b44a5ee4ee0b505876c81cafa10c452403b95635
Author: Sharlatan Hellseher <sharlata...@gmail.com>
AuthorDate: Tue Mar 25 12:08:21 2025 +0000

    gnu: python-autograd: Fix indentation.
    
    * gnu/packages/machine-learning.scm (python-autograd): Fix indentation.
    
    Change-Id: I67b1c01d323e2458b49447969bb4164f71d1571b
---
 gnu/packages/machine-learning.scm | 51 +++++++++++++++++++++------------------
 1 file changed, 27 insertions(+), 24 deletions(-)

diff --git a/gnu/packages/machine-learning.scm b/gnu/packages/machine-learning.scm
index d0439fde45..8e2de4f580 100644
--- a/gnu/packages/machine-learning.scm
+++ b/gnu/packages/machine-learning.scm
@@ -2681,34 +2681,37 @@ Covariance Matrix Adaptation Evolution Strategy (CMA-ES) for Python.")
     (license license:expat)))
 
 (define-public python-autograd
-    (package
-      (name "python-autograd")
-      (version "1.7.0")
-      (source (origin
-                (method git-fetch)
-                (uri (git-reference
-                      (url "https://github.com/HIPS/autograd")
-                      (commit (string-append "v" version))))
-                (sha256
-                 (base32
-                  "1fpnmm3mzw355iq7w751j4mjfcr0yh324cxidba1l22652gg8r8m"))
-                (file-name (git-file-name name version))))
-      (build-system pyproject-build-system)
-      (native-inputs
-       (list python-hatchling python-pytest))
-      (propagated-inputs
-       (list python-future python-numpy))
-      (home-page "https://github.com/HIPS/autograd")
-      (synopsis "Efficiently computes derivatives of NumPy code")
-      (description "Autograd can automatically differentiate native Python and
-NumPy code.  It can handle a large subset of Python's features, including loops,
-ifs, recursion and closures, and it can even take derivatives of derivatives
-of derivatives.  It supports reverse-mode differentiation
+  (package
+    (name "python-autograd")
+    (version "1.7.0")
+    (source
+     (origin
+       (method git-fetch)
+       (uri (git-reference
+             (url "https://github.com/HIPS/autograd")
+             (commit (string-append "v" version))))
+       (sha256
+        (base32 "1fpnmm3mzw355iq7w751j4mjfcr0yh324cxidba1l22652gg8r8m"))
+       (file-name (git-file-name name version))))
+    (build-system pyproject-build-system)
+    (native-inputs
+     (list python-hatchling
+           python-pytest))
+    (propagated-inputs
+     (list python-future
+           python-numpy))
+    (home-page "https://github.com/HIPS/autograd")
+    (synopsis "Efficiently computes derivatives of NumPy code")
+    (description
+     "Autograd can automatically differentiate native Python and NumPy code.
+It can handle a large subset of Python's features, including loops, ifs,
+recursion and closures, and it can even take derivatives of derivatives of
+derivatives.  It supports reverse-mode differentiation
 (a.k.a. backpropagation), which means it can efficiently take gradients of
 scalar-valued functions with respect to array-valued arguments, as well as
 forward-mode differentiation, and the two can be composed arbitrarily.  The
 main intended application of Autograd is gradient-based optimization.")
-      (license license:expat)))
+    (license license:expat)))
 
 (define-public lightgbm
   (package

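For context on the package description above, here is a minimal sketch of
the library's advertised reverse-mode differentiation, based on autograd's
upstream documentation (the tanh example and the grad operator come from the
project's README, not from this commit):

    import autograd.numpy as np   # autograd's thinly wrapped NumPy
    from autograd import grad     # reverse-mode gradient operator

    def tanh(x):
        y = np.exp(-2.0 * x)
        return (1.0 - y) / (1.0 + y)

    d_tanh = grad(tanh)           # builds d(tanh)/dx as an ordinary function
    print(d_tanh(1.0))            # derivative at x = 1.0
    print(grad(grad(tanh))(1.0))  # second derivative: grad calls compose

grad takes a scalar-valued Python function and returns a new function
computing its gradient, which is the "derivatives of derivatives" behaviour
the description refers to.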