Add ONNX export support for RT-DETR models
xenova committed Jun 28, 2024
1 parent d0a84a9 commit f28ed38
Showing 4 changed files with 50 additions and 0 deletions.
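What this change enables, as a minimal sketch (not part of the diff) — the checkpoint name `PekingU/rtdetr_r50vd` and the output directory are assumed examples:

```python
# Minimal sketch, assuming "PekingU/rtdetr_r50vd" as an example RT-DETR checkpoint.
# Exports an RT-DETR model to ONNX via the programmatic exporter API;
# the diff below registers the ONNX config that makes this possible.
from optimum.exporters.onnx import main_export

main_export(
    "PekingU/rtdetr_r50vd",   # any RT-DETR checkpoint on the Hub
    output="rtdetr_onnx",     # directory receiving the exported model and configs
    task="object-detection",
)
```

The export resolves the opset and dummy inputs from the `RTDetrOnnxConfig` registered in `model_configs.py` below.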
1 change: 1 addition & 0 deletions docs/source/exporters/onnx/overview.mdx
@@ -85,6 +85,7 @@ Supported architectures from [🤗 Transformers](https://huggingface.co/docs/tra
- ResNet
- Roberta
- Roformer
- RT-DETR
- SAM
- Segformer
- SEW
43 changes: 43 additions & 0 deletions optimum/exporters/onnx/model_configs.py
@@ -787,6 +787,49 @@ def outputs(self) -> Dict[str, Dict[int, str]]:
        return super().outputs


class RTDetrDummyInputGenerator(DummyVisionInputGenerator):
    def __init__(
        self,
        task: str,
        normalized_config: NormalizedVisionConfig,
        batch_size: int = DEFAULT_DUMMY_SHAPES["batch_size"],
        num_channels: int = DEFAULT_DUMMY_SHAPES["num_channels"],
        width: int = DEFAULT_DUMMY_SHAPES["width"],
        height: int = DEFAULT_DUMMY_SHAPES["height"],
        **kwargs,
    ):
        super().__init__(
            task=task,
            normalized_config=normalized_config,
            batch_size=batch_size,
            num_channels=num_channels,
            width=width,
            height=height,
            **kwargs,
        )

        from transformers.onnx.utils import get_preprocessor

        # If the checkpoint ships an image processor with a fixed size, use it for the dummy inputs.
        preprocessor = get_preprocessor(normalized_config._name_or_path)
        if preprocessor is not None and hasattr(preprocessor, "size"):
            self.height = preprocessor.size.get("height", self.height)
            self.width = preprocessor.size.get("width", self.width)

    def generate(self, input_name: str, framework: str = "pt", int_dtype: str = "int64", float_dtype: str = "fp32"):
        input_ = super().generate(
            input_name=input_name, framework=framework, int_dtype=int_dtype, float_dtype=float_dtype
        )
        return input_


class RTDetrOnnxConfig(ViTOnnxConfig):
    # OPSET=16 required. Otherwise we get the following error:
    # torch.onnx.errors.UnsupportedOperatorError: Exporting the operator 'aten::grid_sampler' to ONNX opset version 12 is not supported. Support for this operator was added in version 16, try exporting with this version.
    DEFAULT_ONNX_OPSET = 16
    DUMMY_INPUT_GENERATOR_CLASSES = (RTDetrDummyInputGenerator,)
    ATOL_FOR_VALIDATION = 1e-3


class TableTransformerOnnxConfig(DetrOnnxConfig):
    pass

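A hedged sketch of how the new dummy input generator resolves its input size — the checkpoint name and the 640x640 shape are assumptions based on RT-DETR's default image processor, not something this diff asserts:

```python
# Illustrative only: the checkpoint name and the expected 640x640 size are assumptions.
from transformers import AutoConfig

from optimum.exporters.onnx.model_configs import RTDetrDummyInputGenerator
from optimum.utils.normalized_config import NormalizedVisionConfig

config = AutoConfig.from_pretrained("PekingU/rtdetr_r50vd")
generator = RTDetrDummyInputGenerator(
    task="object-detection",
    normalized_config=NormalizedVisionConfig(config),
)
# The generator reads height/width from the checkpoint's image processor when available,
# instead of falling back to the generic DEFAULT_DUMMY_SHAPES values.
pixel_values = generator.generate("pixel_values", framework="pt")
print(pixel_values.shape)  # e.g. torch.Size([2, 3, 640, 640])
```

Because `RTDetrOnnxConfig` sets `DEFAULT_ONNX_OPSET = 16`, the exported graph may contain `aten::grid_sampler`, which only became exportable at opset 16.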
5 changes: 5 additions & 0 deletions optimum/exporters/tasks.py
@@ -951,6 +951,11 @@ class TasksManager:
onnx="RoFormerOnnxConfig",
tflite="RoFormerTFLiteConfig",
),
"rt-detr": supported_tasks_mapping(
"feature-extraction",
"object-detection",
onnx="RTDetrOnnxConfig",
),
"sam": supported_tasks_mapping(
"feature-extraction",
onnx="SamOnnxConfig",
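A small sketch, assuming the `TasksManager` lookup helpers, showing that the new `"rt-detr"` entry is queryable:

```python
# Sketch only: checks which tasks the newly registered "rt-detr" entry exposes to the ONNX exporter.
from optimum.exporters.tasks import TasksManager

supported = TasksManager.get_supported_tasks_for_model_type("rt-detr", exporter="onnx")
print(sorted(supported))  # expected to include "feature-extraction" and "object-detection"
```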
1 change: 1 addition & 0 deletions optimum/utils/normalized_config.py
@@ -216,6 +216,7 @@ class NormalizedConfigManager:
    'owlvit',
    'perceiver',
    'roformer',
    'rt-detr',
    'squeezebert',
    'table-transformer',
    """
