From 409f4b5ab39fa48f7494815de704bc171e37522a Mon Sep 17 00:00:00 2001
From: Tong Li
Date: Wed, 14 Aug 2024 07:19:34 +0000
Subject: [PATCH] update

---
 colossalai/booster/plugin/hybrid_parallel_plugin.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/colossalai/booster/plugin/hybrid_parallel_plugin.py b/colossalai/booster/plugin/hybrid_parallel_plugin.py
index e5acdb051..e359957f5 100644
--- a/colossalai/booster/plugin/hybrid_parallel_plugin.py
+++ b/colossalai/booster/plugin/hybrid_parallel_plugin.py
@@ -1332,7 +1332,7 @@ class HybridParallelPlugin(PipelinePluginBase):
             or not torch.is_grad_enabled()
         ):
             return outputs
-
+        print("Show torch status:", torch.is_grad_enabled())
         # Synchronize the grads of shared parameters of the model.
         model.sync_shared_params()
         # Synchronize sequence parallelism gradients of the model.
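
For reference, a minimal standalone sketch (not part of the patch) of what the inserted debug print reports; it assumes nothing beyond a working PyTorch install:

    import torch

    # Gradient tracking is enabled by default in eager mode, so the
    # debug print added by this patch would show True here.
    print("Show torch status:", torch.is_grad_enabled())  # True

    # Inside a no_grad() context the same check reports False, which is
    # the condition the early return above guards against.
    with torch.no_grad():
        print("Show torch status:", torch.is_grad_enabled())  # False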