Mirror of https://github.com/hpcaitech/ColossalAI.git
fix typo colossalai/auto_parallel autochunk fx/passes etc. (#3808)
@@ -381,7 +381,7 @@ class AlphaBetaProfiler:
         first_latency, first_bandwidth = _extract_alpha_beta(first_axis, first_axis_process_group)
         second_latency, second_bandwidth = _extract_alpha_beta(second_axis, second_axis_process_group)
         mesh_alpha = [first_latency, second_latency]
-        # The beta values have been enlarged by 1e10 times temporarilly because the computation cost
+        # The beta values have been enlarged by 1e10 times temporarily because the computation cost
         # is still estimated in the unit of TFLOPs instead of time. We will remove this factor in future.
         mesh_beta = [1e10 / first_bandwidth, 1e10 / second_bandwidth]
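For context, mesh_alpha and mesh_beta feed the standard alpha-beta communication model, where a transfer costs roughly a fixed per-message latency plus bytes divided by bandwidth. The sketch below is illustrative only and not part of ColossalAI's API; the helper name, the sample latency/bandwidth figures, and the 64 MB message size are assumptions.

# Hypothetical sketch of the alpha-beta cost model behind mesh_alpha / mesh_beta.
# Not ColossalAI code: the helper name and the numbers are made up for illustration.

def estimate_comm_cost(num_bytes: float, alpha: float, beta: float) -> float:
    # Classic alpha-beta estimate: per-message latency plus a per-byte transfer term.
    # beta carries the 1e10 scaling mentioned in the comment above, so the result
    # is in the same temporarily inflated unit, not in plain seconds.
    return alpha + beta * num_bytes

# Latencies (seconds) and bandwidths (bytes/s) as _extract_alpha_beta might report them.
first_latency, first_bandwidth = 2e-5, 100e9     # assumed: 20 us, 100 GB/s
second_latency, second_bandwidth = 5e-5, 25e9    # assumed: 50 us, 25 GB/s

mesh_alpha = [first_latency, second_latency]
mesh_beta = [1e10 / first_bandwidth, 1e10 / second_bandwidth]

# Rough cost of moving 64 MB along each axis of the 2D device mesh.
for axis, (alpha, beta) in enumerate(zip(mesh_alpha, mesh_beta)):
    print(f"axis {axis}: {estimate_comm_cost(64 * 2**20, alpha, beta):.3e}")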