diff --git a/tests/distributed/distributed.py b/tests/distributed/distributed.py
index c3aabf8e2..690140cd1 100644
--- a/tests/distributed/distributed.py
+++ b/tests/distributed/distributed.py
@@ -97,7 +97,8 @@ class DistributedExec(ABC):
     exec_timeout: int = TEST_TIMEOUT
 
     @abstractmethod
-    def run(self): ...
+    def run(self):
+        ...
 
     def __call__(self, request=None):
         self._fixture_kwargs = self._get_fixture_kwargs(request, self.run)
diff --git a/tests/exporters/test_export.py b/tests/exporters/test_export.py
index 62c304c3c..0c5eae1de 100644
--- a/tests/exporters/test_export.py
+++ b/tests/exporters/test_export.py
@@ -68,8 +68,8 @@ def _get_models_to_test(
             model_type = model_type.replace("_", "-")
             if exclude_model_types is None or (model_type not in exclude_model_types):
                 task_config_mapping = TasksManager.get_supported_tasks_for_model_type(
-                        model_type, "neuron", library_name=library_name
-                    )
+                    model_type, "neuron", library_name=library_name
+                )
                 if isinstance(model_names_tasks, str):
                     # test export of all tasks on the same model
                     tasks = list(task_config_mapping.keys())
@@ -171,11 +171,12 @@ def _neuronx_export(
     def test_export(self, test_name, name, model_name, task, neuron_config_constructor):
         self._neuronx_export(test_name, name, model_name, task, neuron_config_constructor)
 
-<<<<<<< HEAD
-    @parameterized.expand(_get_models_to_test(SENTENCE_TRANSFORMERS_MODELS, library_name="sentence_transformers"))
-=======
     @parameterized.expand(
-        _get_models_to_test(EXPORT_MODELS_TINY, exclude_model_types=WEIGHTS_NEFF_SEPARATION_UNSUPPORTED_ARCH)
+        _get_models_to_test(
+            EXPORT_MODELS_TINY,
+            exclude_model_types=WEIGHTS_NEFF_SEPARATION_UNSUPPORTED_ARCH,
+            library_name="transformers",
+        )
     )
     @is_inferentia_test
     @requires_neuronx
@@ -184,8 +185,7 @@ def test_export_separated_weights(self, test_name, name, model_name, task, neuro
             test_name, name, model_name, task, neuron_config_constructor, inline_weights_to_neff=False
         )
 
-    @parameterized.expand(_get_models_to_test(SENTENCE_TRANSFORMERS_MODELS))
->>>>>>> main
+    @parameterized.expand(_get_models_to_test(SENTENCE_TRANSFORMERS_MODELS, library_name="sentence_transformers"))
     @is_inferentia_test
     @require_vision
     @require_sentence_transformers
diff --git a/tests/test_cache_utils.py b/tests/test_cache_utils.py
index 567de3178..ffd2c2e7d 100644
--- a/tests/test_cache_utils.py
+++ b/tests/test_cache_utils.py
@@ -83,9 +83,9 @@ def test_get_neuron_cache_path(self):
         assert get_neuron_cache_path() is None
 
         custom_cache_dir_name = Path("_this/is_/my1/2custom/cache/dir")
-        os.environ["NEURON_CC_FLAGS"] = (
-            f"--some --parameters --here --cache_dir={custom_cache_dir_name} --other --paremeters --here"
-        )
+        os.environ[
+            "NEURON_CC_FLAGS"
+        ] = f"--some --parameters --here --cache_dir={custom_cache_dir_name} --other --paremeters --here"
 
         self.assertEqual(get_neuron_cache_path(), custom_cache_dir_name)
 
@@ -99,9 +99,9 @@ def _test_set_neuron_cache_path(self, new_cache_path):
         set_neuron_cache_path(new_cache_path, ignore_no_cache=True)
         self.assertEqual(get_neuron_cache_path(), Path(new_cache_path))
 
-        os.environ["NEURON_CC_FLAGS"] = (
-            "--some --parameters --here --cache_dir=original_cache_dir --other --paremeters"
-        )
+        os.environ[
+            "NEURON_CC_FLAGS"
+        ] = "--some --parameters --here --cache_dir=original_cache_dir --other --paremeters"
         set_neuron_cache_path(new_cache_path)
         self.assertEqual(get_neuron_cache_path(), Path(new_cache_path))
 