gnu: Add python-autograd

* gnu/packages/machine-learning.scm (python-autograd, python2-autograd): New
variables.

Signed-off-by: Ludovic Courtès <ludo@gnu.org>
Fis Trivial 2018-04-28 03:47:03 +00:00, committed by Ludovic Courtès
commit 2dab4188ec (parent 3c2d267f4b)
1 changed file with 44 additions and 0 deletions

@@ -6,6 +6,7 @@
;;; Copyright © 2018 Tobias Geerinckx-Rice <me@tobias.gr>
;;; Copyright © 2018 Mark Meyer <mark@ofosos.org>
;;; Copyright © 2018 Ben Woodcroft <donttrustben@gmail.com>
;;; Copyright © 2018 Fis Trivial <ybbs.daans@hotmail.com>
;;;
;;; This file is part of GNU Guix.
;;;
@@ -688,3 +689,46 @@ mining and data analysis.")
(define-public python2-scikit-learn
  (package-with-python2 python-scikit-learn))

(define-public python-autograd
  (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
         (revision "0")
         (version (git-version "0.0.0" revision commit)))
    (package
      (name "python-autograd")
      (home-page "https://github.com/HIPS/autograd")
      (source (origin
                (method git-fetch)
                (uri (git-reference
                      (url home-page)
                      (commit commit)))
                (sha256
                 (base32
                  "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
                (file-name (git-file-name name version))))
      (version version)
      (build-system python-build-system)
      (native-inputs
       `(("python-nose" ,python-nose)
         ("python-pytest" ,python-pytest)))
      (propagated-inputs
       `(("python-future" ,python-future)
         ("python-numpy" ,python-numpy)))
      (arguments
       `(#:phases (modify-phases %standard-phases
                    (replace 'check
                      (lambda _
                        (invoke "py.test" "-v"))))))
      (synopsis "Efficiently computes derivatives of NumPy code")
      (description "Autograd can automatically differentiate native Python and
NumPy code. It can handle a large subset of Python's features, including loops,
ifs, recursion and closures, and it can even take derivatives of derivatives
of derivatives. It supports reverse-mode differentiation
(a.k.a. backpropagation), which means it can efficiently take gradients of
scalar-valued functions with respect to array-valued arguments, as well as
forward-mode differentiation, and the two can be composed arbitrarily. The
main intended application of Autograd is gradient-based optimization.")
      (license license:expat))))

(define-public python2-autograd
  (package-with-python2 python-autograd))
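
For reference, the package description above can be illustrated with a short
usage sketch in Python. This sketch is not part of the commit; it assumes
python-autograd is available in the environment (for instance after
"guix package -i python-autograd"), and the tanh function is only an
illustrative example.

# Minimal sketch of what the packaged library does (not part of the commit).
import autograd.numpy as np   # thin wrappers around NumPy functions
from autograd import grad     # reverse-mode differentiation

def tanh(x):
    # An ordinary Python/NumPy function; nothing autograd-specific here.
    return (1.0 - np.exp(-x)) / (1.0 + np.exp(-x))

d_tanh = grad(tanh)           # grad returns a new function computing d(tanh)/dx

print(tanh(1.0))              # function value at x = 1.0
print(d_tanh(1.0))            # derivative at x = 1.0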