fix typo colossalai/auto_parallel autochunk fx/passes etc. (#3808)

Author:    digger yu
Date:      2023-05-24 09:01:50 +08:00
Committer: GitHub
Parent:    725365f297
Commit:    7f8203af69

19 changed files with 31 additions and 31 deletions

@@ -381,7 +381,7 @@ class AlphaBetaProfiler:
first_latency, first_bandwidth = _extract_alpha_beta(first_axis, first_axis_process_group)
second_latency, second_bandwidth = _extract_alpha_beta(second_axis, second_axis_process_group)
mesh_alpha = [first_latency, second_latency]
-    # The beta values have been enlarged by 1e10 times temporarilly because the computation cost
+    # The beta values have been enlarged by 1e10 times temporarily because the computation cost
# is still estimated in the unit of TFLOPs instead of time. We will remove this factor in future.
mesh_beta = [1e10 / first_bandwidth, 1e10 / second_bandwidth]
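
The `mesh_alpha`/`mesh_beta` pair above follows the standard alpha-beta communication model, where transfer time is roughly latency plus message size divided by bandwidth. Below is a minimal, self-contained sketch of that model — not ColossalAI's actual API — using hypothetical profiled values; the names `estimate_comm_cost`, `message_size_bytes`, and the numeric figures are illustrative assumptions.

```python
# Sketch only (assumed helper, not part of ColossalAI): how alpha-beta values
# like mesh_alpha / mesh_beta are typically used to estimate communication cost.
# cost ≈ alpha (latency) + message_size * beta (scaled inverse bandwidth);
# the 1e10 factor keeps the result comparable to compute cost counted in TFLOPs.

def estimate_comm_cost(message_size_bytes: float, alpha: float, beta: float) -> float:
    """Estimate the cost of one transfer along a single mesh axis."""
    return alpha + message_size_bytes * beta


# Hypothetical profiled values for a 2D device mesh.
mesh_alpha = [1.2e-5, 3.4e-5]                # per-axis latency (seconds)
mesh_beta = [1e10 / 2.5e10, 1e10 / 1.2e10]   # per-axis scaled inverse bandwidth

size = 4 * 1024 ** 2  # 4 MiB message
print(f"axis-0 cost: {estimate_comm_cost(size, mesh_alpha[0], mesh_beta[0]):.3e}")
print(f"axis-1 cost: {estimate_comm_cost(size, mesh_alpha[1], mesh_beta[1]):.3e}")
```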