
Commit

Merge branch 'dev' into dev
mvpatel2000 committed Feb 13, 2024
2 parents 30d9598 + 0d041ac commit abc0e71
Showing 2 changed files with 3 additions and 13 deletions.
14 changes: 2 additions & 12 deletions composer/algorithms/low_precision_layernorm/low_precision_layernorm.py

@@ -75,21 +75,11 @@ def apply(self, event: Event, state: State, logger: Logger) -> Optional[int]:

 class LPLayerNorm(torch.nn.LayerNorm):
 
-    def __init__(
-        self,
-        normalized_shape,
-        eps: float = 1e-05,
-        elementwise_affine: bool = True,
-        *,
-        bias: bool = True,
-        device=None,
-        dtype=None,
-    ):
+    def __init__(self, normalized_shape, eps=1e-05, elementwise_affine=True, device=None, dtype=None):
         super().__init__(
             normalized_shape=normalized_shape,
             eps=eps,
             elementwise_affine=elementwise_affine,
-            bias=bias,
             device=device,
             dtype=dtype,
         )
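
This hunk only touches the constructor; the low-precision behavior itself comes from a `forward` override elsewhere in the file. A rough, self-contained sketch of that pattern, assuming the usual downcast-under-autocast approach (the class name and autocast handling below are illustrative, not this file's exact code):

import torch
import torch.nn.functional as F

class SketchLPLayerNorm(torch.nn.LayerNorm):
    """Illustrative only: runs layer norm in the autocast dtype instead of fp32."""

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        if not torch.is_autocast_enabled():
            return super().forward(x)
        # Downcast the input and affine parameters to the autocast dtype, then
        # run layer_norm with autocast disabled so the op stays low precision.
        dtype = torch.get_autocast_gpu_dtype() if x.is_cuda else torch.get_autocast_cpu_dtype()
        weight = self.weight.to(dtype) if self.weight is not None else None
        bias = self.bias.to(dtype) if self.bias is not None else None
        with torch.autocast(device_type=x.device.type, enabled=False):
            return F.layer_norm(x.to(dtype), self.normalized_shape, weight, bias, self.eps)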

@@ -121,7 +111,7 @@ def _to_LPLayerNorm(layer: torch.nn.Module, module_index: int) -> LPLayerNorm:
"""Defines a replacement policy from a `torch.nn.LayerNorm` to a `LPLayerNorm`"""
if not isinstance(layer, torch.nn.LayerNorm):
raise TypeError(f'Expected torch.nn.LayerNorm, got {type(layer)}')
lp_layernorm = LPLayerNorm(layer.normalized_shape, layer.eps, layer.elementwise_affine) # type: ignore
lp_layernorm = LPLayerNorm(layer.normalized_shape, layer.eps, layer.elementwise_affine)

with torch.no_grad():
if layer.weight is None: # pyright: ignore[reportUnnecessaryComparison]
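
For context, `_to_LPLayerNorm` is a replacement policy: Composer's module surgery walks the model and calls it on every `torch.nn.LayerNorm` it finds. A minimal standalone version of that walk (`swap_layernorms` is an illustrative stand-in, not Composer's actual surgery utility, which also remaps optimizer parameter groups after swapping):

import torch

def swap_layernorms(module: torch.nn.Module) -> torch.nn.Module:
    """Recursively replace each torch.nn.LayerNorm child with an LPLayerNorm."""
    for name, child in module.named_children():
        if isinstance(child, torch.nn.LayerNorm):
            replacement = LPLayerNorm(child.normalized_shape, child.eps, child.elementwise_affine)
            # Carry over the affine parameters, if the original layer had them.
            with torch.no_grad():
                if child.weight is not None:
                    replacement.weight.copy_(child.weight)
                if child.bias is not None:
                    replacement.bias.copy_(child.bias)
            setattr(module, name, replacement)
        else:
            swap_layernorms(child)
    return module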
2 changes: 1 addition & 1 deletion composer/cli/launcher.py
@@ -291,7 +291,7 @@ def _launch_processes(
         MASTER_ADDR=master_addr,
         MASTER_PORT=str(master_port),
         PYTHONUNBUFFERED='1',
-        TORCH_NCCL_ASYNC_ERROR_HANDLING='1',
+        NCCL_ASYNC_ERROR_HANDLING='1',
     ):
         # Populate the distributed variables in all launcher args
         for arg in training_script_args:
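
The variables above are set around process launch so that every spawned rank inherits them (NCCL_ASYNC_ERROR_HANDLING=1 makes NCCL collectives abort on error instead of hanging). A self-contained sketch of that pattern, where `patch_env` is an illustrative stand-in for the launcher's actual helper:

import contextlib
import os
import subprocess

@contextlib.contextmanager
def patch_env(**environs: str):
    """Temporarily set environment variables, restoring prior values on exit."""
    saved = {name: os.environ.get(name) for name in environs}
    os.environ.update(environs)
    try:
        yield
    finally:
        for name, value in saved.items():
            if value is None:
                os.environ.pop(name, None)
            else:
                os.environ[name] = value

# Usage mirroring the hunk above; the child process inherits os.environ by
# default ('train.py' and the address/port values are placeholders).
with patch_env(
        MASTER_ADDR='127.0.0.1',
        MASTER_PORT='29500',
        PYTHONUNBUFFERED='1',
        NCCL_ASYNC_ERROR_HANDLING='1',
):
    proc = subprocess.Popen(['python', 'train.py'])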

0 comments on commit abc0e71
