https://github.com/hpcaitech/ColossalAI.git
fix typo examples/community/roberta (#3925)
@@ -97,7 +97,7 @@ def throughput_calculator(numel, args, config, iteration_time, total_iterations,
 def synchronize():
     if not torch.distributed.is_available():
         return
-    if not torch.distributed.is_intialized():
+    if not torch.distributed.is_initialized():
         return
     world_size = torch.distributed.get_world_size()
     if world_size == 1:
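Why the fix matters: `torch.distributed.is_intialized` is not an attribute of the module, so on any build where distributed is available the old line raised AttributeError instead of falling through to the early return. Below is a minimal, runnable sketch of the corrected guard pattern; the trailing `barrier()` is an assumption, since the hunk cuts off after the world-size check.

import torch
import torch.distributed


def synchronize():
    """Barrier across all ranks; a no-op outside distributed runs."""
    if not torch.distributed.is_available():
        return
    # The fixed check: with the typo, this attribute lookup itself failed.
    if not torch.distributed.is_initialized():
        return
    if torch.distributed.get_world_size() == 1:
        return
    # Assumed final step (not shown in the hunk): wait at a barrier.
    torch.distributed.barrier()


if __name__ == "__main__":
    synchronize()  # safe no-op in a plain single-process run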
@@ -110,7 +110,7 @@ class Timers:
         """Write timers to a tensorboard writer"""
         # currently when using add_scalars,
         # torch.utils.add_scalars makes each timer its own run, which
-        # polutes the runs list, so we just add each as a scalar
+        # pollutes the runs list, so we just add each as a scalar
         assert normalizer > 0.0
         for name in names:
             value = self.timers[name].elapsed(reset=reset) / normalizer
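The comment preserved by this hunk explains a TensorBoard quirk: `SummaryWriter.add_scalars` writes each key of its tag dictionary to a separate event-file subdirectory, so every timer would show up as its own run and clutter the runs list, whereas one `add_scalar` call per timer keeps all tags in a single run. A self-contained sketch of that pattern follows; `SimpleTimer`, `write_timers`, and the `"-time"` tag suffix are illustrative stand-ins, not the repo's actual classes.

import time
from torch.utils.tensorboard import SummaryWriter


class SimpleTimer:
    # Hypothetical stand-in for the repo's timer objects.
    def __init__(self):
        self._start = time.time()

    def elapsed(self, reset=True):
        now = time.time()
        value = now - self._start
        if reset:
            self._start = now
        return value


def write_timers(timers, names, writer, iteration, normalizer=1.0):
    assert normalizer > 0.0
    for name in names:
        value = timers[name].elapsed() / normalizer
        # One scalar tag per timer keeps everything inside a single run,
        # instead of add_scalars spawning a new run per timer.
        writer.add_scalar(name + "-time", value, iteration)


writer = SummaryWriter(log_dir="runs/timers-demo")
timers = {"forward": SimpleTimer(), "backward": SimpleTimer()}
write_timers(timers, ["forward", "backward"], writer, iteration=0)
writer.close()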