diff --git a/optimum/exporters/tasks.py b/optimum/exporters/tasks.py index 54a36f06e9..0ebcfc2759 100644 --- a/optimum/exporters/tasks.py +++ b/optimum/exporters/tasks.py @@ -512,7 +512,7 @@ class TasksManager: "feature-extraction-with-past", "text-generation", "text-generation-with-past", - "text-classification", + # "text-classification", # TODO: maybe reenable once fixed. See: https://github.com/huggingface/optimum/pull/1308 "token-classification", onnx="GPT2OnnxConfig", ), @@ -521,7 +521,7 @@ class TasksManager: "feature-extraction-with-past", "text-generation", "text-generation-with-past", - "text-classification", + # "text-classification", # TODO: maybe reenable once fixed. See: https://github.com/huggingface/optimum/pull/1308 "token-classification", onnx="GPTBigCodeOnnxConfig", ), @@ -531,7 +531,7 @@ class TasksManager: "text-generation", "text-generation-with-past", "question-answering", - "text-classification", + # "text-classification", # TODO: maybe reenable once fixed. See: https://github.com/huggingface/optimum/pull/1308 onnx="GPTJOnnxConfig", ), "gpt-neo": supported_tasks_mapping( @@ -539,7 +539,7 @@ class TasksManager: "feature-extraction-with-past", "text-generation", "text-generation-with-past", - "text-classification", + # "text-classification", # TODO: maybe reenable once fixed. See: https://github.com/huggingface/optimum/pull/1308 onnx="GPTNeoOnnxConfig", ), "gpt-neox": supported_tasks_mapping( @@ -714,7 +714,7 @@ class TasksManager: "text-generation", "text-generation-with-past", "question-answering", - "text-classification", + # "text-classification", # TODO: maybe reenable once fixed. See: https://github.com/huggingface/optimum/pull/1308 onnx="OPTOnnxConfig", ), "llama": supported_tasks_mapping( @@ -722,7 +722,7 @@ class TasksManager: "feature-extraction-with-past", "text-generation", "text-generation-with-past", - "text-classification", + # "text-classification", # TODO: maybe reenable once fixed. See: https://github.com/huggingface/optimum/pull/1308 onnx="LlamaOnnxConfig", ), "pegasus": supported_tasks_mapping( diff --git a/optimum/pipelines/pipelines_base.py b/optimum/pipelines/pipelines_base.py index 3ad73f4667..e2046882bd 100644 --- a/optimum/pipelines/pipelines_base.py +++ b/optimum/pipelines/pipelines_base.py @@ -171,7 +171,7 @@ def load_bettertransformer( load_feature_extractor=None, SUPPORTED_TASKS=None, subfolder: str = "", - use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: str = "main", model_kwargs: Optional[Dict[str, Any]] = None, config: AutoConfig = None, @@ -218,7 +218,7 @@ def load_ort_pipeline( load_feature_extractor, SUPPORTED_TASKS, subfolder: str = "", - use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: str = "main", model_kwargs: Optional[Dict[str, Any]] = None, config: AutoConfig = None, @@ -246,7 +246,7 @@ def load_ort_pipeline( pattern, glob_pattern="**/*.onnx", subfolder=subfolder, - use_auth_token=use_auth_token, + use_auth_token=token, revision=revision, ) export = len(onnx_files) == 0 @@ -292,7 +292,7 @@ def pipeline( tokenizer: Optional[Union[str, PreTrainedTokenizer]] = None, feature_extractor: Optional[Union[str, PreTrainedFeatureExtractor]] = None, use_fast: bool = True, - use_auth_token: Optional[Union[str, bool]] = None, + token: Optional[Union[str, bool]] = None, accelerator: Optional[str] = "ort", revision: Optional[str] = None, trust_remote_code: Optional[bool] = None, @@ -315,7 +315,7 @@ def pipeline( # copied from transformers.pipelines.__init__.py hub_kwargs = { "revision": revision, - "use_auth_token": use_auth_token, + "token": token, "trust_remote_code": trust_remote_code, "_commit_hash": None, } @@ -364,6 +364,7 @@ def pipeline( SUPPORTED_TASKS=supported_tasks, config=config, hub_kwargs=hub_kwargs, + token=token, *model_kwargs, **kwargs, ) @@ -379,6 +380,5 @@ def pipeline( tokenizer=tokenizer, feature_extractor=feature_extractor, use_fast=use_fast, - use_auth_token=use_auth_token, **kwargs, ) diff --git a/tests/exporters/onnx/test_onnx_config_loss.py b/tests/exporters/onnx/test_onnx_config_loss.py index 1eed7d9b61..667f599b88 100644 --- a/tests/exporters/onnx/test_onnx_config_loss.py +++ b/tests/exporters/onnx/test_onnx_config_loss.py @@ -123,6 +123,9 @@ def test_onnx_config_with_loss(self): gc.collect() def test_onnx_decoder_model_with_config_with_loss(self): + self.skipTest( + "Skipping due to a bug introduced in transformers with https://github.com/huggingface/transformers/pull/24979, argmax on int64 is not supported by ONNX" + ) with tempfile.TemporaryDirectory() as tmp_dir: # Prepare model and dataset model_checkpoint = "hf-internal-testing/tiny-random-gpt2"