Bump transformers to 4.38.2 (#1018)
dakinggg committed Mar 8, 2024
1 parent c2aec30 commit 2b17497
Showing 3 changed files with 25 additions and 12 deletions.
33 changes: 24 additions & 9 deletions llmfoundry/models/layers/attention.py

```diff
@@ -619,19 +619,34 @@ def forward(
 
             value = value.view(bsz, seqlen, self.kv_n_heads * self.head_dim)
         elif rotary_emb_w_meta_info['impl'] == 'hf':
-            (cos, sin) = rotary_emb(value, seq_len)
-            if is_transformers_version_gte('4.36'):
-                query, key = apply_rotary_pos_emb(query,
-                                                  key,
-                                                  cos,
-                                                  sin,
-                                                  offset_info,
+            if is_transformers_version_gte('4.38'):
+                (cos, sin) = rotary_emb(x=value,
+                                        position_ids=offset_info,
+                                        seq_len=None)
+            else:
+                (cos, sin) = rotary_emb(x=value, seq_len=seq_len)
+            if is_transformers_version_gte('4.38'):
+                query, key = apply_rotary_pos_emb(q=query,
+                                                  k=key,
+                                                  cos=cos,
+                                                  sin=sin,
+                                                  position_ids=None,
+                                                  unsqueeze_dim=2)
+            elif is_transformers_version_gte('4.36'):
+                query, key = apply_rotary_pos_emb(q=query,
+                                                  k=key,
+                                                  cos=cos,
+                                                  sin=sin,
+                                                  position_ids=offset_info,
                                                   unsqueeze_dim=2)
             else:
                 query = query.transpose(1, 2)
                 key = key.transpose(1, 2)
-                query, key = apply_rotary_pos_emb(query, key, cos, sin,
-                                                  offset_info)
+                query, key = apply_rotary_pos_emb(q=query,
+                                                  k=key,
+                                                  cos=cos,
+                                                  sin=sin,
+                                                  position_ids=offset_info)
                 query = query.transpose(1, 2)
                 key = key.transpose(1, 2)
```
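Context for the hunk above: in transformers 4.38, the Hugging Face rotary embedding module takes explicit `position_ids` (with `seq_len` deprecated), and `apply_rotary_pos_emb` then accepts `position_ids=None` because the positions are already baked into `cos` and `sin`. The diff therefore gates both calls on the installed version via `is_transformers_version_gte`. That helper is defined elsewhere in llm-foundry and not shown in this diff; a minimal sketch of how such a gate can be written, assuming the `packaging` library (an illustration, not necessarily the repo's exact implementation):

```python
# Sketch of a transformers version gate; llm-foundry's actual
# is_transformers_version_gte may differ in detail.
from packaging import version

import transformers


def is_transformers_version_gte(min_version: str) -> bool:
    """Return True if the installed transformers is at least `min_version`."""
    return version.parse(transformers.__version__) >= version.parse(min_version)


print(is_transformers_version_gte('4.38'))  # True under the new 4.38.2 pin
```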

2 changes: 1 addition & 1 deletion setup.py

```diff
@@ -53,7 +53,7 @@
     'mosaicml[libcloud,wandb,oci,gcs]>=0.20.1,<0.21',
     'mlflow>=2.10,<3',
     'accelerate>=0.25,<0.26',  # for HF inference `device_map`
-    'transformers>=4.37,<4.38',
+    'transformers>=4.38.2,<4.39',
     'mosaicml-streaming>=0.7.4,<0.8',
     'torch>=2.2.1,<2.3',
     'datasets>=2.16,<2.17',
```
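The pin moves from the 4.37 series to 4.38, requiring at least the 4.38.2 patch release. Not part of the commit, but a quick way to confirm that an installed environment satisfies the new specifier, assuming the `packaging` library is available:

```python
# Quick environment check (not from the commit): does the installed
# transformers satisfy the new pin declared in setup.py?
from packaging.specifiers import SpecifierSet

import transformers

spec = SpecifierSet('>=4.38.2,<4.39')
print(transformers.__version__, transformers.__version__ in spec)
```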
2 changes: 0 additions & 2 deletions tests/models/test_model.py

```diff
@@ -1172,12 +1172,10 @@ def test_generate(attention_impl: str, precision: str, pos_emb_config: dict,
                      max_new_tokens=5,
                      use_cache=True)
    _ = mpt.generate(input_ids=None,
-                     inputs_embeds=None,
                      max_new_tokens=5,
                      use_cache=False,
                      bos_token_id=50256)
    _ = mpt.generate(input_ids=None,
-                     inputs_embeds=None,
                      max_new_tokens=5,
                      use_cache=True,
                      bos_token_id=50256)
```
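The test drops the explicit `inputs_embeds=None` argument, presumably because transformers 4.38 handles an explicitly passed `inputs_embeds` differently when preparing generation inputs. The pattern being exercised, generation seeded only by `bos_token_id`, is unchanged. A self-contained illustration of that call pattern (not from the commit, using the small public `gpt2` checkpoint instead of an MPT model, chosen only because its BOS token id is also 50256):

```python
# Illustration of the generate() call pattern the updated test keeps.
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained('gpt2')

# With input_ids=None, generate() seeds the sequence from bos_token_id.
output = model.generate(input_ids=None,
                        max_new_tokens=5,
                        use_cache=True,
                        bos_token_id=50256)
print(output.shape)  # torch.Size([1, 6]): the BOS token plus 5 new tokens
```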
