summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAlfredo Tupone <tupone@gentoo.org>2024-10-26 23:05:11 +0200
committerAlfredo Tupone <tupone@gentoo.org>2024-10-26 23:05:42 +0200
commit9ca7be15bb4a93d5cb264ac0173c164bcf2401ac (patch)
tree4ae21e16c4e1c924c672578d5286cfb59bc1d989 /sci-libs/tokenizers
parentapp-misc/cw: drop 1.0.16-r3 (diff)
downloadgentoo-9ca7be15bb4a93d5cb264ac0173c164bcf2401ac.tar.gz
gentoo-9ca7be15bb4a93d5cb264ac0173c164bcf2401ac.tar.bz2
gentoo-9ca7be15bb4a93d5cb264ac0173c164bcf2401ac.zip
sci-libs/tokenizers: drop failing test
Closes: https://bugs.gentoo.org/941910 Signed-off-by: Alfredo Tupone <tupone@gentoo.org>
Diffstat (limited to 'sci-libs/tokenizers')
-rw-r--r--sci-libs/tokenizers/tokenizers-0.20.1.ebuild35
1 files changed, 35 insertions, 0 deletions
diff --git a/sci-libs/tokenizers/tokenizers-0.20.1.ebuild b/sci-libs/tokenizers/tokenizers-0.20.1.ebuild
index 1feae074105a..f2172b1cfd5a 100644
--- a/sci-libs/tokenizers/tokenizers-0.20.1.ebuild
+++ b/sci-libs/tokenizers/tokenizers-0.20.1.ebuild
@@ -326,6 +326,41 @@ src_test() {
local EPYTEST_DESELECT=(
"tests/bindings/test_tokenizer.py::TestTokenizer::test_encode_formats"
"tests/documentation/test_tutorial_train_from_iterators.py::TestTrainFromIterators::test_datasets"
+ "tests/bindings/test_encoding.py::TestEncoding::test_sequence_ids"
+ "tests/bindings/test_encoding.py::TestEncoding::test_n_sequences"
+ "tests/bindings/test_encoding.py::TestEncoding::test_word_to_tokens"
+ "tests/bindings/test_encoding.py::TestEncoding::test_word_to_chars"
+ "tests/bindings/test_encoding.py::TestEncoding::test_token_to_sequence"
+ "tests/bindings/test_encoding.py::TestEncoding::test_token_to_chars"
+ "tests/bindings/test_encoding.py::TestEncoding::test_token_to_word"
+ "tests/bindings/test_encoding.py::TestEncoding::test_char_to_token"
+ "tests/bindings/test_encoding.py::TestEncoding::test_char_to_word"
+ "tests/bindings/test_encoding.py::TestEncoding::test_truncation"
+ "tests/bindings/test_encoding.py::TestEncoding::test_invalid_truncate_direction"
+ "tests/bindings/test_models.py::TestBPE::test_instantiate"
+ "tests/bindings/test_models.py::TestWordLevel::test_instantiate"
+ "tests/bindings/test_processors.py::TestByteLevelProcessing::test_processing"
+ "tests/bindings/test_trainers.py::TestUnigram::test_train"
+ "tests/documentation/test_pipeline.py::TestPipeline::test_pipeline"
+ "tests/documentation/test_pipeline.py::TestPipeline::test_bert_example"
+ "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_basic_encode"
+ "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_lowercase"
+ "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_decoding"
+ "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_multiprocessing_with_parallelism"
+ "tests/test_serialization.py::TestSerialization::test_full_serialization_albert"
+ "tests/test_serialization.py::TestSerialization::test_str_big"
+ "tests/bindings/test_tokenizer.py::TestTokenizer::test_encode_add_special_tokens"
+ "tests/bindings/test_tokenizer.py::TestTokenizer::test_from_pretrained"
+ "tests/bindings/test_tokenizer.py::TestTokenizer::test_from_pretrained_revision"
+ "tests/bindings/test_tokenizer.py::TestTokenizer::test_encode_special_tokens"
+ "tests/bindings/test_tokenizer.py::TestTokenizer::test_splitting"
+ "tests/documentation/test_quicktour.py::TestQuicktour::test_quicktour"
+ "tests/implementations/test_bert_wordpiece.py::TestBertWordPieceTokenizer::test_basic_encode"
+ "tests/implementations/test_bert_wordpiece.py::TestBertWordPieceTokenizer::test_multiprocessing_with_parallelism"
+ "tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_basic_encode"
+ "tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_add_prefix_space"
+ "tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_lowerspace"
+ "tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_multiprocessing_with_parallelism"
)
local -x EPYTEST_IGNORE=(benches/)
distutils-r1_src_test