diff --git a/composer/callbacks/mlperf.py b/composer/callbacks/mlperf.py
index ddd2f02a761..970e08cacfb 100644
--- a/composer/callbacks/mlperf.py
+++ b/composer/callbacks/mlperf.py
@@ -263,22 +263,8 @@ def _get_dataloader_stats(self, dataloader: Iterable):
             if isinstance(dataloader.dataset, IterableDataset):
                 num_samples *= dist.get_world_size()
             return (dataloader.batch_size, num_samples)
-        try:
-            # attempt to import ffcv and test if its an ffcv loader.
-            import ffcv  # type: ignore
-
-            warnings.warn(DeprecationWarning('ffcv is deprecated and will be removed in v0.18'))
-
-            if isinstance(dataloader, ffcv.loader.Loader):
-                # Use the cached attribute ffcv.init_traversal_order to compute number of samples
-                return (
-                    dataloader.batch_size,  # type: ignore
-                    len(dataloader.next_traversal_order()) * dist.get_world_size()  # type: ignore
-                )
-        except ImportError:
-            pass
-
-        raise TypeError(f'torch dataloader or ffcv dataloader required (and ffcv installed)')
+
+        raise TypeError(f'torch dataloader required')
 
     def fit_start(self, state: State, logger: Logger) -> None:
         if _global_rank_zero():
diff --git a/composer/callbacks/utils.py b/composer/callbacks/utils.py
deleted file mode 100644
index 7a4097cecff..00000000000
--- a/composer/callbacks/utils.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2022 MosaicML Composer authors
-# SPDX-License-Identifier: Apache-2.0
-
-"""Callback utils."""
-
-import warnings
-from typing import Callable, Optional, Set, Union
-
-from composer.core import Event, State, Time
-from composer.utils.misc import create_interval_scheduler as _create_interval_scheduler
-
-
-def create_interval_scheduler(interval: Union[str, int, Time],
-                              include_end_of_training: bool = True,
-                              checkpoint_events: bool = True,
-                              final_events: Optional[Set[Event]] = None) -> Callable[[State, Event], bool]:
-    """Helper function to create a scheduler according to a specified interval.
-
-    Args:
-        interval (Union[str, int, :class:`.Time`]): If an integer, it will be assumed to be in :attr:`.TimeUnit.EPOCH`.
-            Otherwise, the unit must be either :attr:`.TimeUnit.EPOCH`, :attr:`.TimeUnit.BATCH`,
-            :attr:`.TimeUnit.TOKEN`, or :attr:`.TimeUnit.SAMPLE`.
-        include_end_of_training (bool): If true, the returned callable will return true at the end of training as well.
-            Otherwise, the returned callable will return true at intervals only.
-        checkpoint_events (bool): If true, will use the EPOCH_CHECKPOINT and BATCH_CHECKPOINT events. If False, will use
-            the EPOCH_END and BATCH_END events.
-        final_events (Optional[Set[Event]]): The set of events to trigger on at the end of training.
-
-    Returns:
-        Callable[[State, Event], bool]: A function that returns true at interval and at the end of training if specified.
-            For example, it can be passed as the ``save_interval`` argument into the :class:`.CheckpointSaver`.
-    """
-    warnings.warn(
-        '`composer.callbacks.utils.create_interval_scheduler has been moved to `composer.utils.misc.create_interval_scheduler` '
-        + 'and will be removed in a future release.',
-        DeprecationWarning,
-    )
-    return _create_interval_scheduler(
-        interval=interval,
-        include_end_of_training=include_end_of_training,
-        checkpoint_events=checkpoint_events,
-        final_events=final_events,
-    )
diff --git a/composer/core/state.py b/composer/core/state.py
index cc97cb83916..33b28535629 100644
--- a/composer/core/state.py
+++ b/composer/core/state.py
@@ -733,7 +733,7 @@ def fsdp_sharded_state_dict_enabled(self):
 
     @property
     def fsdp_elastic_sharded_enabled(self):
-        warnings.warn('state.fsdp_elastic_sharded_enabled is deprecated and will be removed v0.21.0')
+        warnings.warn('state.fsdp_elastic_sharded_enabled is deprecated and will be removed v0.21.0', DeprecationWarning)
         return self.fsdp_sharded_state_dict_enabled
 
     @property
diff --git a/composer/core/time.py b/composer/core/time.py
index 98d3745f54f..a94cd787861 100644
--- a/composer/core/time.py
+++ b/composer/core/time.py
@@ -19,6 +19,7 @@
 
 import datetime
 import re
+import warnings
 from typing import Any, Dict, Generic, Optional, TypeVar, Union, cast
 
 from composer.core.serializable import Serializable
@@ -532,21 +533,8 @@ def get_state(self) -> Dict[str, Union[Time[int], datetime.timedelta]]:
 
         Returns:
             Dict[str, Union[Time[int], datetime.timedelta]]: All values of the timestamp object.
         """
-        return {
-            'iteration': self.iteration,
-            'epoch': self.epoch,
-            'batch': self.batch,
-            'sample': self.sample,
-            'token': self.token,
-            'epoch_in_iteration': self.epoch_in_iteration,
-            'batch_in_epoch': self.batch_in_epoch,
-            'sample_in_epoch': self.sample_in_epoch,
-            'token_in_epoch': self.token_in_epoch,
-            'total_wct': self.total_wct,
-            # 'iteration_wct': self.iteration_wct,
-            'epoch_wct': self.epoch_wct,
-            'batch_wct': self.batch_wct,
-        }
+        warnings.warn('core.time.Timestamp.get_state is deprecated and will be removed v0.21.0', DeprecationWarning)
+        return self.state_dict()
 
     def load_state_dict(self, state: Dict[str, Any]) -> None:
         self._epoch = Time(state['epoch'], TimeUnit.EPOCH)
diff --git a/composer/loggers/in_memory_logger.py b/composer/loggers/in_memory_logger.py
index 8f5a2c0ea3a..753d1e70e27 100644
--- a/composer/loggers/in_memory_logger.py
+++ b/composer/loggers/in_memory_logger.py
@@ -9,6 +9,7 @@
 from __future__ import annotations
 
 import copy
+import datetime
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
 
 import numpy as np
@@ -157,7 +158,8 @@ def get_timeseries(self, metric: str) -> Dict[str, Any]:
             timestamp, metric_value = datapoint
             timeseries.setdefault(metric, []).append(metric_value)
             # Iterate through time units and add them all!
-            for field, time in timestamp.get_state().items():
+            for field, time in timestamp.state_dict().items():
+                assert isinstance(time, Time) or isinstance(time, datetime.timedelta)
                 time_value = time.value if isinstance(time, Time) else time.total_seconds()
                 timeseries.setdefault(field, []).append(time_value)
             # Convert to numpy arrays