diff --git a/fairscale/nn/misc/flatten_params_wrapper.py b/fairscale/nn/misc/flatten_params_wrapper.py
index 455442e69..7298a051e 100644
--- a/fairscale/nn/misc/flatten_params_wrapper.py
+++ b/fairscale/nn/misc/flatten_params_wrapper.py
@@ -372,13 +372,10 @@ def _unflatten_params_as_views(self) -> None:
         self.flat_param unchanged.
         """
         assert self.is_flattened
-        logger.info(f"CHRISLOG: {self._require_backward_grad_sync=}")
         if self._require_backward_grad_sync:
-            logger.info("CHRISLOG: calling self.get_param_views() without torch.no_grad()")
             ps = self.get_param_views()
         else:
             with torch.no_grad():
-                logger.info("CHRISLOG: calling self.get_param_views() with torch.no_grad()")
                 ps = self.get_param_views()
         param_views = []
 