[WIP] llama-70b
michaelbenayoun committed Feb 7, 2024
1 parent f734478 commit c21286f
Showing 1 changed file with 9 additions and 5 deletions.
14 changes: 9 additions & 5 deletions optimum/neuron/distributed/parallel_layers.py
@@ -841,11 +841,15 @@ def _transform(
 if weight_map is not None:
     layer_to_fully_qualified_name = {id(module): name for name, module in model.named_modules()}
     linear_projection_qualified_name = layer_to_fully_qualified_name[id(linear_projection)]
-    linear_projection_weight_info, linear_projection_bias_weight_info = cls._get_linear_weight_info(
-        weight_map,
-        linear_projection_qualified_name,
-        device=device,
-    )
+    try:
+        linear_projection_weight_info, linear_projection_bias_weight_info = cls._get_linear_weight_info(
+            weight_map,
+            linear_projection_qualified_name,
+            device=device,
+        )
+    except ValueError:
+        # This means there are no weights available for this linear layer, but there is no need to fail here.
+        pass

 parallel_linear_projection = linear_to_parallel_linear(
     getattr(linear_projection_parent, linear_projection_attr_name),
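The intent of the change, shown as a minimal standalone sketch: when the checkpoint's weight map has no entry for a given linear layer, the lookup no longer aborts the transformation; the weight-info variables simply keep their earlier default (assumed here to be `None`) and the parallel layer is created without pre-loaded weight metadata. The helper and class below are hypothetical stand-ins, not the actual `optimum-neuron` implementation; only the "raise `ValueError` on a missing entry, catch it and continue" behaviour is taken from the diff above.

```python
from typing import Optional, Tuple


class WeightInformation:
    """Hypothetical stand-in for the weight-info object returned by the lookup."""

    def __init__(self, filename: str, qualified_name: str, device=None):
        self.filename = filename
        self.qualified_name = qualified_name
        self.device = device


def get_linear_weight_info(
    weight_map: dict, qualified_name: str, device=None
) -> Tuple[WeightInformation, Optional[WeightInformation]]:
    """Sketch of a lookup that raises ValueError when the weight map has no
    entry for the requested linear layer (assumed behaviour)."""
    weight_name = f"{qualified_name}.weight"
    if weight_name not in weight_map:
        raise ValueError(f"Could not find {weight_name} in the weight map.")
    weight_info = WeightInformation(weight_map[weight_name], weight_name, device=device)
    bias_name = f"{qualified_name}.bias"
    bias_info = (
        WeightInformation(weight_map[bias_name], bias_name, device=device)
        if bias_name in weight_map
        else None
    )
    return weight_info, bias_info


# The pattern introduced by the commit: a missing entry is no longer fatal.
weight_map = {"model.layers.0.self_attn.q_proj.weight": "model-00001.safetensors"}
linear_projection_weight_info = None
linear_projection_bias_weight_info = None
try:
    linear_projection_weight_info, linear_projection_bias_weight_info = get_linear_weight_info(
        weight_map, "model.layers.0.mlp.gate_proj"
    )
except ValueError:
    # No weights for this linear layer in the checkpoint: fall back to None instead of failing.
    pass

print(linear_projection_weight_info, linear_projection_bias_weight_info)  # -> None None
```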
