mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-01 09:07:51 +00:00
[autoparallel] modify comm nodes' memory cost in construct chain (#2263)
* [autoparallel] align the data_ptr with the old version of auto activation checkpoint pipeline * [autoparallel] using fwd_time and bwd_time instead of fwd_flop and bwd_flop * [autoparallel] specify comm nodes' memory cost in construct chain
This commit is contained in:
@@ -100,7 +100,7 @@ def calculate_fwd_time(n: Node) -> float:
         fwd_time (float): the result of `fwd_time`
     """
     # TODO(super-dainiu): should divide the time by the number of GPUs as well as TFLOPs
-    return n.meta["fwd_flop"]
+    return n.meta["fwd_time"]


 def calculate_bwd_time(n: Node) -> float:
@@ -111,4 +111,4 @@ def calculate_bwd_time(n: Node) -> float:
         bwd_time (float): the result of `bwd_time`
     """
     # TODO(super-dainiu): should divide the time by the number of GPUs as well as TFLOPs
-    return n.meta["bwd_flop"]
+    return n.meta["bwd_time"]
Reference in New Issue
Block a user