[autoparallel] fix forward memory calculation (#2062)

Boyuan Yao
2022-12-04 15:00:16 +08:00
committed by GitHub
parent 44ea461890
commit 4b40fbd743
5 changed files with 29 additions and 24 deletions


@@ -49,10 +49,12 @@ def relu_meta_info(*args, **kwargs) -> Tuple[TrainCycleItem, TrainCycleItem, Lis
     # calculate memory cost
     # NOTE: the inplace ReLU don't have forward memory cost
-    fwd_memory_cost = MemoryCost(activation=0 if inplace else activation_size(output_tensor),
-                                 parameter=0,
-                                 temp=0,
-                                 buffer=0)
+    # NOTE: currently in SPMD solver we always believe that there will be a new tensor created in forward
+    fwd_memory_cost = MemoryCost(
+        activation=activation_size(input_tensor) if inplace else activation_size([output_tensor, input_tensor]),
+        parameter=0,
+        temp=0,
+        buffer=0)
     bwd_memory_cost = MemoryCost(activation=activation_size(input_tensor), parameter=0, temp=0, buffer=0)
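For reference, a minimal runnable sketch of the before/after forward activation accounting. MemoryCost and activation_size below are simplified stand-ins (assumed to sum tensor byte sizes), not the actual Colossal-AI implementations:

import torch
from dataclasses import dataclass
from typing import List, Union


@dataclass
class MemoryCost:
    # simplified stand-in for the MemoryCost item used by the meta-info functions
    activation: int = 0
    parameter: int = 0
    temp: int = 0
    buffer: int = 0


def activation_size(tensors: Union[torch.Tensor, List[torch.Tensor]]) -> int:
    # assumed behaviour: total byte size of one tensor or a list of tensors
    if isinstance(tensors, torch.Tensor):
        tensors = [tensors]
    return sum(t.numel() * t.element_size() for t in tensors)


def relu_fwd_memory_cost(input_tensor: torch.Tensor, output_tensor: torch.Tensor, inplace: bool) -> MemoryCost:
    # new accounting: the SPMD solver assumes a tensor is always materialized in forward,
    # so the inplace variant charges the (reused) input buffer instead of zero
    activation = activation_size(input_tensor) if inplace else activation_size([output_tensor, input_tensor])
    return MemoryCost(activation=activation, parameter=0, temp=0, buffer=0)


x = torch.randn(4, 1024)                                       # 16384 bytes in fp32
y = torch.relu(x)
print(relu_fwd_memory_cost(x, y, inplace=True).activation)     # 16384: input buffer only
print(relu_fwd_memory_cost(x, y, inplace=False).activation)    # 32768: output + input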