From e9d8275cf709796ebb6f8b5397859c7dd944fc44 Mon Sep 17 00:00:00 2001
From: opentaco
Date: Fri, 23 Sep 2022 20:10:37 +0200
Subject: [PATCH] Deactivate test_random_topk_token_phrases unit test

---
 .../bittensor_tests/utils/test_tokenizer_utils.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/unit_tests/bittensor_tests/utils/test_tokenizer_utils.py b/tests/unit_tests/bittensor_tests/utils/test_tokenizer_utils.py
index 482d7f8c18..d19f91879b 100644
--- a/tests/unit_tests/bittensor_tests/utils/test_tokenizer_utils.py
+++ b/tests/unit_tests/bittensor_tests/utils/test_tokenizer_utils.py
@@ -433,9 +433,9 @@ def test_topk_token_phrases():
             tokenizer_topk_phrases(sample_text[text_name], model_name, max_length, _enc_pre_logits, topk=128)
 
 
-def test_random_topk_token_phrases(single_token_ratios: Tuple = (0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0),
-                                   max_len_final: int = 10, batch_size: int = 32, topk: int = 4096,
-                                   ignore_index: int = -100, vocab_len: int = 50256):
+def _test_random_topk_token_phrases(single_token_ratios: Tuple = (0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0),
+                                    max_len_final: int = 10, batch_size: int = 32, topk: int = 4096,
+                                    ignore_index: int = -100, vocab_len: int = 50256):
     r""" Asserts that randomly instantiated compact_topk encodings can be correctly decoded to recover the original
         topk_tensor, where: