From 0fcdff811bbc07a292a928092853c8e99b518ff1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?F=C3=A9lix=20Marty?= <9808326+fxmarty@users.noreply.github.com>
Date: Wed, 26 Jul 2023 18:23:33 +0200
Subject: [PATCH] fix: skip broken tiny-tokenizer and tapas models in
 test_invert_model_logits

---
 tests/bettertransformer/test_encoder.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/tests/bettertransformer/test_encoder.py b/tests/bettertransformer/test_encoder.py
index 6a2e520276..1a152c7f6e 100644
--- a/tests/bettertransformer/test_encoder.py
+++ b/tests/bettertransformer/test_encoder.py
@@ -256,6 +256,7 @@ def test_accelerate_compatibility_single_gpu_without_keeping(self):
         )
     )
     def test_logits(self, test_name: str, model_type: str, batch_size: int):
+        # TODO: enable these tests
         if model_type in ["rocbert", "splinter", "markuplm", "bert-generation"]:
             self.skipTest(f"tiny tokenizers are broken on the Hub {model_type}")
         if model_type in ["tapas"]:
@@ -273,6 +274,7 @@ def test_logits(self, test_name: str, model_type: str, batch_size: int):
         )
     )
     def test_logits_backward(self, test_name: str, model_type: str, batch_size: int):
+        # TODO: enable these tests
         if model_type in ["rocbert", "splinter", "markuplm", "bert-generation"]:
             self.skipTest(f"tiny tokenizer is broken on the Hub for {model_type}")
         if model_type in ["tapas"]:
@@ -293,6 +295,12 @@ def test_save_load_invertible(self, test_name: str, model_type: str, keep_origin
 
     @parameterized.expand(grid_parameters(FULL_GRID))
     def test_invert_model_logits(self, test_name: str, model_type: str, keep_original_model=False):
+        # TODO: re-enable these tests
+        if model_type in ["rocbert", "splinter", "markuplm", "bert-generation"]:
+            self.skipTest(f"tiny tokenizers are broken on the Hub for {model_type}")
+        if model_type in ["tapas"]:
+            self.skipTest(f"{model_type} requires a dataframe")
+
         model_id = MODELS_DICT[model_type]
         self._test_invert_model_logits(
             model_id=model_id, model_type=model_type, keep_original_model=keep_original_model