@@ -2480,6 +2480,47 @@ (define-public python-cutlery
@end itemize")
(license license:expat)))
+(define-public python-curated-tokenizers
+ (package
+ (name "python-curated-tokenizers")
+ (version "0.0.9")
+ ;; The release tarball bundles a third_party protobuf requiring a
+ ;; version Guix does not currently package (3.6 < version <= 3.19.5).
+ ;; Try building against Guix's own protobuf when updating.
+ (source
+ (origin
+ (method url-fetch)
+ (uri (pypi-uri "curated-tokenizers" version))
+ (sha256
+ (base32 "09ffs2qjlli35wnf8wf64s14xm75vi5ynvkrn9nqllmk9bjlfgf9"))))
+ (build-system pyproject-build-system)
+ (arguments
+ (list
+ #:phases
+ #~(modify-phases %standard-phases
+ ;; When both the local source tree and the installed package are
+ ;; visible, the local copy is imported, but it lacks the built
+ ;; shared libraries; delete it and test the installed copy instead.
+ (add-before 'check 'pre-check
+ (lambda* (#:key tests? inputs outputs #:allow-other-keys)
+ (when tests?
+ (copy-recursively "curated_tokenizers/tests" "tests")
+ (delete-file-recursively "curated_tokenizers")
+ (add-installed-pythonpath inputs outputs)))))))
+ (propagated-inputs (list python-regex))
+ (native-inputs (list python-cython python-pytest))
+ (home-page "https://github.com/explosion/curated-tokenizers")
+ (synopsis "Lightweight piece tokenization library")
+ (description "This package provides a lightweight wordpiece and
+sentencepiece tokenization library. It supports multiple tokenizers:
+@itemize
+@item BPE
+@item Byte BPE
+@item Unigram
+@item Wordpiece
+@end itemize")
+ (license license:expat)))
+
(define-public python-curated-transformers
(package
(name "python-curated-transformers")