 import torch.utils.data
 from torch.utils.data.distributed import DistributedSampler
 
-from composer.utils import VersionedDeprecationWarning, dist, ensure_tuple
+from composer.utils import dist, ensure_tuple
 
 if TYPE_CHECKING:
     from composer.core.types import Batch
@@ -155,10 +155,6 @@ class DataSpec:
         num_tokens (int, optional): The total number of tokens in an epoch. This field is used by the
             :class:`.Timestamp` (training progress tracker).
 
-        device_transforms ((Batch) -> Batch, optional): Deprecated argument. Please use ``batch_transforms`` for batch
-            level transformations on CPU and ``microbatch_transforms`` for microbatch level transformations on target
-            device.
-
         batch_transforms ((Batch) -> Batch, optional): Function called by the :class:`.Trainer` to modify the
             batch before it is moved onto the device. For example, this function can be used for CPU-based
             normalization. It can modify the batch in-place, and it should return the modified batch. If not specified,
@@ -194,7 +190,6 @@ def __init__(
         dataloader: Union[Iterable, torch.utils.data.DataLoader],
         num_samples: Optional[int] = None,
         num_tokens: Optional[int] = None,
-        device_transforms: Optional[Callable[[Batch], Batch]] = None,
         batch_transforms: Optional[Callable[[Batch], Batch]] = None,
         microbatch_transforms: Optional[Callable[[Batch], Batch]] = None,
         split_batch: Optional[Callable[[Batch, Union[int, float]], Sequence[Batch]]] = None,
@@ -203,22 +198,6 @@ def __init__(
     ) -> None:
         self.dataloader: Union[Iterable, torch.utils.data.DataLoader] = dataloader
         self.num_tokens = num_tokens
-        if device_transforms is not None:
-            if batch_transforms is not None:
-                raise ValueError(
-                    'Cannot specify both `device_transforms` and `batch_transforms`. Please use `batch_transforms` for '
-                    'batch level transformations on CPU and `microbatch_transforms` for microbatch level transformations '
-                    'on target device.',
-                )
-            warnings.warn(
-                VersionedDeprecationWarning(
-                    'The `device_transforms` argument is deprecated. Please use `batch_transforms` for batch level '
-                    'transformations on CPU and `microbatch_transforms` for microbatch level transformations on target '
-                    'device.',
-                    'v0.29.0',
-                ),
-            )
-            self.batch_transforms = device_transforms
         self.batch_transforms = self._default_transforms if batch_transforms is None else batch_transforms
         self.microbatch_transforms = self._default_transforms if microbatch_transforms is None else microbatch_transforms
         self.split_batch = default_split_batch if split_batch is None else split_batch
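With `device_transforms` removed, the same hook is provided by `batch_transforms` (batch-level transforms on CPU, before the batch is moved onto the device) and `microbatch_transforms` (per-microbatch transforms on the target device). A minimal migration sketch follows, assuming Composer is installed; the toy dataset, dataloader, and normalize() helper are illustrative placeholders and not part of this commit:

import torch
from torch.utils.data import DataLoader, TensorDataset

from composer.core import DataSpec

# Toy dataset/dataloader used only to make the sketch runnable.
dataset = TensorDataset(torch.rand(64, 3, 32, 32), torch.randint(0, 10, (64,)))
dataloader = DataLoader(dataset, batch_size=8)

def normalize(batch):
    # Batch-level transform: runs on CPU before the batch is moved onto the device.
    inputs, targets = batch
    return (inputs - 0.5) / 0.5, targets

# Previously: DataSpec(dataloader, device_transforms=normalize)  (argument removed by this commit)
data_spec = DataSpec(dataloader, batch_transforms=normalize)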