[Transformer Optimization] Return model directly for unknown model type (microsoft#18642)

This pull request improves the handling of unsupported model types in the optimization process.
trajepl authored Dec 4, 2023
1 parent 2f8b86b commit a5b2291
Showing 1 changed file with 8 additions and 0 deletions.
8 changes: 8 additions & 0 deletions onnxruntime/python/tools/transformers/optimizer.py
@@ -209,6 +209,10 @@ def optimize_by_fusion(
     if model_type not in ["bert", "swin", "unet", "vae", "clip"] and (num_heads == 0 or hidden_size == 0):
         logger.warning(f"Please specify parameters of num_heads and hidden_size for model_type {model_type}")
 
+    if model_type not in MODEL_TYPES:
+        logger.warning(f"Unsupported model type: {model_type} for graph fusion, directly return model.")
+        return OnnxModel(model)
+
     (optimizer_class, producer, _) = MODEL_TYPES[model_type]
 
     if model.producer_name and producer != model.producer_name:
@@ -290,6 +294,10 @@ def optimize_model(
     """
     assert opt_level is None or opt_level in [0, 1, 2, 99]
 
+    if model_type not in MODEL_TYPES:
+        logger.warning(f"Unsupported model type: {model_type} for optimization, directly return model.")
+        return OnnxModel(load_model(input))
+
     (optimizer_class, _producer, default_opt_level) = MODEL_TYPES[model_type]
 
     if opt_level is None:
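For reference, a minimal usage sketch of the change (the file path and model_type below are hypothetical, not taken from the commit): with an unrecognized model_type, optimize_model now logs a warning and returns the loaded graph wrapped in OnnxModel instead of raising a KeyError on the MODEL_TYPES lookup.

# Usage sketch; assumes an onnxruntime build that includes this commit.
from onnxruntime.transformers.optimizer import optimize_model

# "graph_model" is not a key in MODEL_TYPES, so before this commit the call
# below raised KeyError at MODEL_TYPES[model_type]; after it, a warning is
# logged and the unmodified graph comes back wrapped in OnnxModel.
opt_model = optimize_model("my_model.onnx", model_type="graph_model")

# The returned OnnxModel can still be saved or inspected as usual.
opt_model.save_model_to_file("my_model_unchanged.onnx")

The same early return applies to optimize_by_fusion, which skips the producer check and fusion pass entirely for unknown model types.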
