[autoparallel] support distributed dataloader option (#1906)
* [autoparallel] support distributed dataloader option
* update output handler to support ddp dataloader
* polish code
@@ -43,8 +43,11 @@ class OperationData:

     def __post_init__(self):
         # if no logical shape is specified, use the data shape as the logical shape
-        if self.logical_shape is None and isinstance(self.data, torch.Tensor):
-            self.logical_shape = self.data.shape
+        if self.logical_shape is None:
+            if isinstance(self.data, torch.Tensor):
+                self.logical_shape = self.data.shape
+            elif isinstance(self.data, tuple):
+                self.logical_shape = tuple([getattr(d, 'shape', None) for d in self.data])

     def __repr__(self) -> str:
         return f'OperationData(name={self.name}, type={self.type})'
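For context: the old `__post_init__` only filled in `logical_shape` when `data` was a single `torch.Tensor`, so tuple-valued data (for example, an output made up of several tensors, which the updated output handler can encounter with a distributed dataloader) was left with `logical_shape = None`. Below is a minimal, self-contained sketch of the new behavior; the class name `OperationDataSketch` and its reduced field set are illustrative stand-ins for this commit's `OperationData`, not the real API.

```python
from dataclasses import dataclass
from typing import Any

import torch


@dataclass
class OperationDataSketch:
    name: str
    type: str  # simplified stand-in; the real field carries an operation-data type enum
    data: Any = None
    logical_shape: Any = None

    def __post_init__(self):
        # mirror of the new logic: derive logical_shape only when it was not given
        if self.logical_shape is None:
            if isinstance(self.data, torch.Tensor):
                self.logical_shape = self.data.shape
            elif isinstance(self.data, tuple):
                # new case: tuple data yields a tuple of per-element shapes;
                # non-tensor elements contribute None instead of raising
                self.logical_shape = tuple(getattr(d, 'shape', None) for d in self.data)


# single tensor: behavior unchanged from before the commit
single = OperationDataSketch(name='input', type='arg', data=torch.empty(4, 8))
assert single.logical_shape == torch.Size([4, 8])

# tuple of tensors: logical_shape used to stay None, now it is derived
pair = OperationDataSketch(name='output', type='output',
                           data=(torch.empty(2, 3), torch.empty(5)))
assert pair.logical_shape == (torch.Size([2, 3]), torch.Size([5]))
```

Using `getattr(d, 'shape', None)` rather than `d.shape` keeps the derivation tolerant of mixed tuples: any element without a `shape` attribute simply records `None` for its logical shape.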