[bug#73266,7/9] gnu: Add python-curated-transformers.

Message ID 20240915085720.13323-7-ngraves@ngraves.fr
State New
Series [bug#73266,1/9] gnu: Add python-azure-storage-file-datalake.

Commit Message

Nicolas Graves Sept. 15, 2024, 8:57 a.m. UTC
* gnu/packages/machine-learning.scm (python-curated-transformers): New variable.

Change-Id: I42cf780097456f5a8a9a9efc2a56e2c082d2a938
---
 gnu/packages/machine-learning.scm | 55 +++++++++++++++++++++++++++++++
 1 file changed, 55 insertions(+)
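
For reference, a minimal way to exercise this definition from a Guix source
checkout with the series applied (a sketch; the exact setup and paths may
differ on your machine):

  $ ./pre-inst-env guix build python-curated-transformers
  $ ./pre-inst-env guix lint python-curated-transformers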

Patch

diff --git a/gnu/packages/machine-learning.scm b/gnu/packages/machine-learning.scm
index 89fcd3c1b7..d1b282fea8 100644
--- a/gnu/packages/machine-learning.scm
+++ b/gnu/packages/machine-learning.scm
@@ -2480,6 +2480,61 @@  (define-public python-cutlery
 @end itemize")
     (license license:expat)))
 
+(define-public python-curated-transformers
+  (package
+    (name "python-curated-transformers")
+    (version "0.1.0")
+    (source
+     (origin
+       (method url-fetch)
+       (uri (pypi-uri "curated-transformers" version))
+       (sha256
+        (base32 "04k54r5cxjl3l7xs4kx4cfnqsjr7gdlr577sp7sl7qgrk3kfqjbm"))))
+    (build-system pyproject-build-system)
+    (arguments
+     (list
+      #:test-flags
+      '(list  ; Most ignored tests require network.
+        "--ignore=curated_transformers/tests/tokenizers/test_auto_tokenizer.py"
+        "-k" (string-append "not test_special_pieces"
+                            " and not test_auto_encoder"
+                            " and not test_auto_decoder"
+                            " and not test_auto_causal_lm"
+                            " and not test_from_hf_hub_to_cache"
+                            " and not test_from_hf_hub_to_cache_legacy"
+                            " and not test_checkpoint_type_without_safetensors"
+                            " and not test_hf_hub_failures"
+                            ;; These were added when downgrading curated_tokenizers.
+                            " and not test_camembert_tokenizer_toy_tokenizer"
+                            " and not test_roberta_tokenizer"
+                            " and not test_xlmr_toy_tokenizer"))))
+    (propagated-inputs (list python-catalogue
+                             python-cutlery
+                             python-huggingface-hub
+                             python-pytorch
+                             python-tokenizers))
+    (native-inputs (list python-pytest))
+    (home-page "https://github.com/explosion/curated-transformers")
+    (synopsis "PyTorch library of transformer models and components")
+    (description
+     "This package provides a @code{PyTorch} library of transformer models and
+components.  It helps to download state-of-the-art models that are composed
+from a set of reusable components.  The stand-out features of Curated
+Transformers are:
+
+@itemize
+@item Supports state-of-the-art transformer models, including LLMs such as
+Falcon, Llama, and Dolly v2.
+@item Each model is composed from a set of reusable building blocks, providing
+many benefits: implementing a feature or bugfix benefits all models, and
+adding new models to the library is low-effort.
+@item Consistent type annotations of all public APIs, for great coding
+support from IDEs.  Integrates well with your existing type-checked code.
+@item Great for education, because the building blocks are easy to study.
+@item Minimal dependencies.
+@end itemize")
+    (license license:expat)))
+
 (define-public python-autograd
   (let* ((commit "c6d81ce7eede6db801d4e9a92b27ec5d409d0eab")
          (revision "0")