From 017188466439ab3c5e403048f4c76f008ef4fb24 Mon Sep 17 00:00:00 2001
From: YeAnbang
Date: Thu, 20 Feb 2025 17:25:36 +0800
Subject: [PATCH] fix inference rebatching bug

---
 applications/ColossalChat/coati/experience_maker/naive.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/applications/ColossalChat/coati/experience_maker/naive.py b/applications/ColossalChat/coati/experience_maker/naive.py
index c7ad4f316..81f8fb80c 100755
--- a/applications/ColossalChat/coati/experience_maker/naive.py
+++ b/applications/ColossalChat/coati/experience_maker/naive.py
@@ -140,7 +140,7 @@ class NaiveExperienceMaker(ExperienceMaker):
         num_actions = 0
 
         for inference_mini_batch_id in range(0, input_ids.size(0), self.inference_batch_size):
-            s, e = inference_mini_batch_id, (inference_mini_batch_id + 1) * self.inference_batch_size
+            s, e = inference_mini_batch_id, inference_mini_batch_id + self.inference_batch_size
             if input_ids[s:e].size(0) == 0:
                 break
             sequences = generate(self.actor, input_ids[s:e], self.tokenizer, **generate_kwargs)