From c70a3dbba823702d3b9d0fb53427590cf990c046 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?F=C3=A9lix=20Marty?= <9808326+fxmarty@users.noreply.github.com>
Date: Wed, 26 Jul 2023 17:25:39 +0200
Subject: [PATCH] fix tests

---
 tests/bettertransformer/test_audio.py          | 16 ---------
 tests/bettertransformer/test_decoder.py        |  5 ---
 tests/bettertransformer/test_encoder.py        | 15 --------
 .../bettertransformer/test_encoder_decoder.py  | 13 -------
 tests/bettertransformer/test_vision.py         | 12 -------
 tests/bettertransformer/testing_utils.py       | 35 +++----------------
 6 files changed, 5 insertions(+), 91 deletions(-)

diff --git a/tests/bettertransformer/test_audio.py b/tests/bettertransformer/test_audio.py
index 5d995ce439..595bf6c5a4 100644
--- a/tests/bettertransformer/test_audio.py
+++ b/tests/bettertransformer/test_audio.py
@@ -160,22 +160,6 @@ def test_logits(self, model_type: str):
                 ),
             )
 
-    @parameterized.expand(SUPPORTED_ARCH)
-    def test_raise_autocast(self, model_type: str):
-        model_ids = (
-            MODELS_DICT[model_type] if isinstance(MODELS_DICT[model_type], tuple) else (MODELS_DICT[model_type],)
-        )
-        for model_id in model_ids:
-            self._test_raise_autocast(model_id, model_type=model_type)
-
-    @parameterized.expand(SUPPORTED_ARCH)
-    def test_raise_train(self, model_type: str):
-        model_ids = (
-            MODELS_DICT[model_type] if isinstance(MODELS_DICT[model_type], tuple) else (MODELS_DICT[model_type],)
-        )
-        for model_id in model_ids:
-            self._test_raise_train(model_id, model_type=model_type)
-
     @parameterized.expand(grid_parameters(FULL_GRID))
     def test_invert_modules(self, test_name: str, model_type: str, keep_original_model=False):
         if model_type in ["hubert", "wav2vec2"] and keep_original_model is True:
diff --git a/tests/bettertransformer/test_decoder.py b/tests/bettertransformer/test_decoder.py
index a19e64fcf2..a417216517 100644
--- a/tests/bettertransformer/test_decoder.py
+++ b/tests/bettertransformer/test_decoder.py
@@ -181,11 +181,6 @@ def test_generation(self, test_name: str, model_type: str, batch_size: int, padd
             f" Maxdiff: {(result_vanilla - result_bettertransformer).abs().max()}",
         )
 
-    @parameterized.expand(SUPPORTED_ARCH)
-    def test_raise_autocast(self, model_type: str):
-        model_id = MODELS_DICT[model_type]
-        self._test_raise_autocast(model_id, model_type=model_type)
-
     @parameterized.expand(SUPPORTED_ARCH)
     @pytest.mark.training
     def test_train(self, model_type: str):
diff --git a/tests/bettertransformer/test_encoder.py b/tests/bettertransformer/test_encoder.py
index 2c44177fe9..6a2e520276 100644
--- a/tests/bettertransformer/test_encoder.py
+++ b/tests/bettertransformer/test_encoder.py
@@ -209,21 +209,6 @@ def check_accelerate_compatibility_cpu_gpu(self, keep_original_model=True, max_m
         self.assertTrue(torch.allclose(output_bt[0][1, 3:], torch.zeros_like(output_bt[0][1, 3:])))
         gc.collect()
 
-    @parameterized.expand(SUPPORTED_ARCH)
-    def test_raise_autocast(self, model_type: str):
-        if model_type == "rocbert":
-            self.skipTest(
-                "unrelated issue with torch.amp.autocast with rocbert (expected scalar type BFloat16 but found Float)"
-            )
-
-        model_id = MODELS_DICT[model_type]
-        self._test_raise_autocast(model_id, model_type)
-
-    @parameterized.expand(SUPPORTED_ARCH)
-    def test_raise_train(self, model_type: str):
-        model_id = MODELS_DICT[model_type]
-        self._test_raise_train(model_id, model_type)
-
     @pytest.mark.gpu_test
     @pytest.mark.accelerate_test
     def test_accelerate_compatibility_cpu_gpu(self):
diff --git a/tests/bettertransformer/test_encoder_decoder.py b/tests/bettertransformer/test_encoder_decoder.py
index f400d16967..df74ed03d2 100644
--- a/tests/bettertransformer/test_encoder_decoder.py
+++ b/tests/bettertransformer/test_encoder_decoder.py
@@ -89,19 +89,6 @@ def test_logits_backward(self, test_name: str, model_type: str, padding, max_len
         model_id = MODELS_DICT[model_type]
         self._test_logits_backward(model_id, model_type=model_type, padding=padding, max_length=max_length)
 
-    @parameterized.expand(SUPPORTED_ARCH)
-    def test_raise_autocast(self, model_type: str):
-        model_id = MODELS_DICT[model_type]
-        self._test_raise_autocast(model_id, model_type=model_type)
-
-    @parameterized.expand(SUPPORTED_ARCH)
-    def test_raise_train(self, model_type: str):
-        model_id = MODELS_DICT[model_type]
-        if model_type not in ["blenderbot", "pegasus", "t5"]:
-            self._test_raise_train(model_id, model_type=model_type)
-        else:
-            self._test_train_decoder(model_id, model_type=model_type)
-
     @parameterized.expand(grid_parameters(FULL_GRID))
     def test_invert_modules(self, test_name: str, model_type: str, keep_original_model=False):
         model_id = MODELS_DICT[model_type]
diff --git a/tests/bettertransformer/test_vision.py b/tests/bettertransformer/test_vision.py
index 48410dff7b..ea04936fab 100644
--- a/tests/bettertransformer/test_vision.py
+++ b/tests/bettertransformer/test_vision.py
@@ -73,18 +73,6 @@ def test_logits(self, model_type: str):
         model_id = MODELS_DICT[model_type]
         self._test_logits(model_id, model_type=model_type)
 
-    @parameterized.expand(SUPPORTED_ARCH)
-    def test_raise_autocast(self, model_type: str):
-        model_id = MODELS_DICT[model_type]
-        self._test_raise_autocast(model_id, model_type=model_type)
-
-    @parameterized.expand(SUPPORTED_ARCH)
-    def test_raise_train(self, model_type: str):
-        if model_type in ["blip-2"]:
-            self.skipTest("can be trained")
-        model_id = MODELS_DICT[model_type]
-        self._test_raise_train(model_id, model_type=model_type)
-
     @parameterized.expand(
         grid_parameters(
             {
diff --git a/tests/bettertransformer/testing_utils.py b/tests/bettertransformer/testing_utils.py
index 0e8a7c7e06..c63d5d241e 100644
--- a/tests/bettertransformer/testing_utils.py
+++ b/tests/bettertransformer/testing_utils.py
@@ -108,9 +108,6 @@ class BetterTransformersTestMixin(unittest.TestCase):
     - `test_logits`: This tests if the converted model produces the same logits than the original model.
     - `test_raise_on_save`: Test if the converion properly raises an error if someone tries to save the model
     using `save_pretrained`.
-    - `test_raise_autocast`: A tests that checks if the conversion raises an error if the model is run under
-    `torch.cuda.amp.autocast`.
-    - `test_raise_train`: A tests that checks if the conversion raises an error if the model is run in training mode.
     """
 
     def prepare_inputs_for_class(self, model_id=None, model_type=None):
@@ -168,6 +165,7 @@ def _test_logits_backward(self, model_id: str, model_type: str, **preprocessor_k
         # `torch.random.set_rng_state`. An alternative could be to make dropout stateful,
         # and to replace them with a static pattern for this test. Currently, we use
         # functional dropout though.
+        # We need to be in train mode to take the right path.
         random_config = set_dropout_to_zero(random_config)
 
         # m2m_100 randomly drops layers, which makes testing flaky (see `skip_the_layer` in transformers, some other models use it as well)
@@ -229,9 +227,13 @@ def _test_logits(self, model_id: str, model_type: str, **preprocessor_kwargs):
         hf_random_model = AutoModel.from_pretrained(model_id).eval()
         random_config = hf_random_model.config
 
+        hf_random_model = hf_random_model.eval()
+
         torch.manual_seed(0)
         converted_model = BetterTransformer.transform(hf_random_model, keep_original_model=True)
 
+        self.assertFalse(hf_random_model.training)
+        self.assertFalse(converted_model.training)
         self.assertFalse(
             hasattr(hf_random_model, "use_bettertransformer"),
             f"The model {hf_random_model.__class__.__name__} has been converted to a `fast` model by mistake.",
@@ -290,33 +292,6 @@ def assert_equal(self, tensor1, tensor2, atol: float, model_name: str):
             f" Maxdiff: {torch.abs(tensor1 - tensor2).max()}",
         )
 
-    def _test_raise_autocast(self, model_id: str, model_type: str, **kwargs):
-        r"""
-        A tests that checks if the conversion raises an error if the model is run under
-        `torch.cuda.amp.autocast`.
-        """
-        inputs = self.prepare_inputs_for_class(model_id=model_id, model_type=model_type, **kwargs)
-        hf_random_model = AutoModel.from_pretrained(model_id).eval()
-
-        # Check for the autocast on CPU
-        with self.assertRaises(ValueError), torch.amp.autocast("cpu"):
-            bt_model = BetterTransformer.transform(hf_random_model, keep_original_model=True)
-            _ = bt_model(**inputs)
-
-    def _test_raise_train(self, model_id: str, model_type: str, **kwargs):
-        r"""
-        A tests that checks if the conversion raises an error if the model is run under
-        `model.train()`.
-        """
-        inputs = self.prepare_inputs_for_class(model_id=model_id, model_type=model_type, **kwargs)
-
-        hf_random_model = AutoModel.from_pretrained(model_id).eval()
-        # Check for training mode
-        with self.assertRaises(ValueError):
-            bt_model = BetterTransformer.transform(hf_random_model, keep_original_model=True)
-            bt_model.train()
-            _ = bt_model(**inputs)
-
     def _test_train_decoder(self, model_id: str, model_type: str, **kwargs):
         r"""
         A tests that checks if the training works as expected for decoder models.