From 1723a0286023132437001773865f1410e9f5e4a0 Mon Sep 17 00:00:00 2001
From: YeAnbang
Date: Thu, 10 Apr 2025 10:22:43 +0800
Subject: [PATCH] move empty cache

---
 applications/ColossalChat/coati/distributed/consumer.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/applications/ColossalChat/coati/distributed/consumer.py b/applications/ColossalChat/coati/distributed/consumer.py
index 6372e2c3a..79beb2a2d 100644
--- a/applications/ColossalChat/coati/distributed/consumer.py
+++ b/applications/ColossalChat/coati/distributed/consumer.py
@@ -129,6 +129,7 @@ class BaseConsumer:
                 if episode != self.num_episodes - 1 or step != self.num_update_per_episode - 1:
                     print(f"[T{dist.get_rank()}] Sync model episode {episode} step {step}")
+                    torch.cuda.empty_cache()
                     state_dict = self.state_dict()
                     if self.rank == 0:
                         ray_broadcast_tensor_dict(
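
For context, the hunk inserts torch.cuda.empty_cache() immediately before the consumer materializes its state dict and broadcasts it for model sync, so cached-but-unused allocator blocks are released first. Below is a minimal sketch of that pattern, not the repo's code: the sync_model helper and the plain nn.Module are assumptions for illustration, and the actual ray_broadcast_tensor_dict call is elided.

```python
import torch
import torch.nn as nn


def sync_model(model: nn.Module) -> dict:
    # Release cached (unused) CUDA memory held by PyTorch's caching allocator
    # before building the full state dict, so the subsequent broadcast step
    # has more free GPU memory to work with.
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
    state_dict = model.state_dict()
    # ... rank 0 would broadcast `state_dict` to the other workers here ...
    return state_dict


if __name__ == "__main__":
    model = nn.Linear(8, 8)
    sd = sync_model(model)
    print(list(sd.keys()))
```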