Skip to content

Commit

Permalink
Address comments
Browse files Browse the repository at this point in the history
  • Loading branch information
es94129 committed Aug 15, 2023
1 parent c5ab158 commit 10bf60a
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions llmfoundry/models/hf/model_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,9 +58,9 @@ def __init__(self,

self.model_forward_args = inspect.getfullargspec(
self.model.forward).args
# inspecting HuggingFace quantized model could not return args correctly
# inspect.getfullargspec could not return args correctly for HuggingFace quantized models
if not self.model_forward_args:
self.model_forward_args = ['input_ids', 'attention_mask']
self.model_forward_args = inspect.signature(model.forward).parameters.keys()

# Note: We need to add the FSDP related attributes to the model AFTER the super init,
# so that the (possible) embedding resizing doesn't destroy them
Expand Down

0 comments on commit 10bf60a

Please sign in to comment.