From 654aefc3c3b153835e31204602e70d30a503a20d Mon Sep 17 00:00:00 2001
From: YeAnbang
Date: Fri, 16 May 2025 14:15:35 +0800
Subject: [PATCH] address conversation

---
 applications/ColossalChat/coati/distributed/producer.py | 4 ++--
 applications/ColossalChat/rl_example.py                 | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/applications/ColossalChat/coati/distributed/producer.py b/applications/ColossalChat/coati/distributed/producer.py
index ba0646d6a..82f57094d 100644
--- a/applications/ColossalChat/coati/distributed/producer.py
+++ b/applications/ColossalChat/coati/distributed/producer.py
@@ -187,7 +187,7 @@ class BaseProducer:
                 for eval_task_name in self.eval_dataloaders:
                     if self.producer_idx == 0:
                         print(
-                            f"[P{self.producer_idx}] Evaluate consumer step {self.consumer_global_step} on task {eval_task_name}"
+                            f"[P{self.producer_idx}] Evaluate model at training step {self.consumer_global_step} on task {eval_task_name}"
                         )
                     eval_results = []
                     eval_statistics_tensor = torch.zeros((2,), dtype=torch.float32).to(self.device)
@@ -220,7 +220,7 @@ class BaseProducer:
                     safe_append_to_jsonl_file(
                         os.path.join(
                             self.eval_save_dir,
-                            f"{eval_task_name}_episode_{episode}_step_{self.consumer_global_step}.jsonl",
+                            f"{eval_task_name}_training_step_{self.consumer_global_step}.jsonl",
                         ),
                         eval_results,
                     )
diff --git a/applications/ColossalChat/rl_example.py b/applications/ColossalChat/rl_example.py
index e5ff7d6d2..0bce3c41d 100644
--- a/applications/ColossalChat/rl_example.py
+++ b/applications/ColossalChat/rl_example.py
@@ -100,7 +100,7 @@ if __name__ == "__main__":
         "--eval-interval",
         type=int,
         default=100,
-        help="Interval for evaluation. Evaluate every ei consumer steps.",
+        help="Interval for evaluation. Evaluate every ei training steps.",
     )

     # Logging/Checkpointing parameters
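Note on the renamed eval dump: with this patch, one JSONL file is written per training step and the episode index is dropped from the filename. The sketch below is a minimal, hypothetical illustration of how a helper like `safe_append_to_jsonl_file` could produce such a file; it is not the repository's actual implementation, and the directory, task name, and step value are assumed values for illustration only.

```python
import json
import os


def safe_append_to_jsonl_file(path, records):
    # Hypothetical sketch of the helper called in the diff above. The real
    # ColossalChat implementation may differ (e.g. it may add file locking so
    # that multiple producers can append to the same file concurrently).
    os.makedirs(os.path.dirname(path) or ".", exist_ok=True)
    with open(path, "a", encoding="utf-8") as f:
        for record in records:
            # One JSON object per line, matching the .jsonl convention.
            f.write(json.dumps(record, ensure_ascii=False) + "\n")


# Illustrative values (assumptions, not taken from the patch):
eval_save_dir = "./eval_results"
eval_task_name = "math_eval"
consumer_global_step = 100

# New naming convention from this patch: <task>_training_step_<step>.jsonl
safe_append_to_jsonl_file(
    os.path.join(eval_save_dir, f"{eval_task_name}_training_step_{consumer_global_step}.jsonl"),
    [{"prompt": "...", "completion": "...", "reward": 1.0}],
)
```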