diff --git a/colossalai/booster/plugin/hybrid_parallel_plugin.py b/colossalai/booster/plugin/hybrid_parallel_plugin.py
index e359957f5..e5acdb051 100644
--- a/colossalai/booster/plugin/hybrid_parallel_plugin.py
+++ b/colossalai/booster/plugin/hybrid_parallel_plugin.py
@@ -1332,7 +1332,7 @@ class HybridParallelPlugin(PipelinePluginBase):
             or not torch.is_grad_enabled()
         ):
             return outputs
-        print("Show torch status:", torch.is_grad_enabled())
+
         # Synchronize the grads of shared parameters of the model.
         model.sync_shared_params()
         # Synchronize sequence parallelism gradients of the model.