commit:     ea954079af6a11a5d5451daeec5652fed1b2c72b
Author:     Alfredo Tupone <tupone <AT> gentoo <DOT> org>
AuthorDate: Sun Oct 27 13:06:59 2024 +0000
Commit:     Alfredo Tupone <tupone <AT> gentoo <DOT> org>
CommitDate: Sun Oct 27 13:07:36 2024 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=ea954079

sci-libs/tokenizers: drop other network tests

Closes: https://bugs.gentoo.org/942327
Signed-off-by: Alfredo Tupone <tupone <AT> gentoo.org>

 sci-libs/tokenizers/tokenizers-0.20.1.ebuild | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/sci-libs/tokenizers/tokenizers-0.20.1.ebuild b/sci-libs/tokenizers/tokenizers-0.20.1.ebuild
index f2172b1cfd5a..f5e839834cd8 100644
--- a/sci-libs/tokenizers/tokenizers-0.20.1.ebuild
+++ b/sci-libs/tokenizers/tokenizers-0.20.1.ebuild
@@ -325,7 +325,6 @@ src_test() {
        cd ../bindings/python
        local EPYTEST_DESELECT=(
                "tests/bindings/test_tokenizer.py::TestTokenizer::test_encode_formats"
-               "tests/documentation/test_tutorial_train_from_iterators.py::TestTrainFromIterators::test_datasets"
                "tests/bindings/test_encoding.py::TestEncoding::test_sequence_ids"
                "tests/bindings/test_encoding.py::TestEncoding::test_n_sequences"
                "tests/bindings/test_encoding.py::TestEncoding::test_word_to_tokens"
@@ -339,28 +338,32 @@ src_test() {
                "tests/bindings/test_encoding.py::TestEncoding::test_invalid_truncate_direction"
                "tests/bindings/test_models.py::TestBPE::test_instantiate"
                "tests/bindings/test_models.py::TestWordLevel::test_instantiate"
+               "tests/bindings/test_models.py::TestWordPiece::test_instantiate"
                "tests/bindings/test_processors.py::TestByteLevelProcessing::test_processing"
-               "tests/bindings/test_trainers.py::TestUnigram::test_train"
-               "tests/documentation/test_pipeline.py::TestPipeline::test_pipeline"
-               "tests/documentation/test_pipeline.py::TestPipeline::test_bert_example"
-               "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_basic_encode"
-               "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_lowercase"
-               "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_decoding"
-               "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_multiprocessing_with_parallelism"
-               "tests/test_serialization.py::TestSerialization::test_full_serialization_albert"
-               "tests/test_serialization.py::TestSerialization::test_str_big"
                "tests/bindings/test_tokenizer.py::TestTokenizer::test_encode_add_special_tokens"
                "tests/bindings/test_tokenizer.py::TestTokenizer::test_from_pretrained"
                "tests/bindings/test_tokenizer.py::TestTokenizer::test_from_pretrained_revision"
                "tests/bindings/test_tokenizer.py::TestTokenizer::test_encode_special_tokens"
                "tests/bindings/test_tokenizer.py::TestTokenizer::test_splitting"
+               "tests/bindings/test_trainers.py::TestUnigram::test_continuing_prefix_trainer_mistmatch"
+               "tests/bindings/test_trainers.py::TestUnigram::test_train"
+               "tests/documentation/test_pipeline.py::TestPipeline::test_pipeline"
+               "tests/documentation/test_pipeline.py::TestPipeline::test_bert_example"
                "tests/documentation/test_quicktour.py::TestQuicktour::test_quicktour"
+               "tests/documentation/test_tutorial_train_from_iterators.py::TestTrainFromIterators::test_datasets"
+               "tests/documentation/test_tutorial_train_from_iterators.py::TestTrainFromIterators::test_gzip"
                "tests/implementations/test_bert_wordpiece.py::TestBertWordPieceTokenizer::test_basic_encode"
                "tests/implementations/test_bert_wordpiece.py::TestBertWordPieceTokenizer::test_multiprocessing_with_parallelism"
                "tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_basic_encode"
                "tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_add_prefix_space"
                "tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_lowerspace"
                "tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_multiprocessing_with_parallelism"
+               "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_basic_encode"
+               "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_lowercase"
+               "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_decoding"
+               "tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_multiprocessing_with_parallelism"
+               "tests/test_serialization.py::TestSerialization::test_full_serialization_albert"
+               "tests/test_serialization.py::TestSerialization::test_str_big"
        )
        local -x EPYTEST_IGNORE=(benches/)
        distutils-r1_src_test

Reply via email to