From a0f8680e854ab9bb761ad0465f59292cebfbe495 Mon Sep 17 00:00:00 2001
From: Tong Li
Date: Fri, 27 Jun 2025 09:52:50 +0800
Subject: [PATCH] fix update

---
 applications/ColossalChat/rl_example.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/applications/ColossalChat/rl_example.py b/applications/ColossalChat/rl_example.py
index 1c77b69e6..d7b7c2a5d 100644
--- a/applications/ColossalChat/rl_example.py
+++ b/applications/ColossalChat/rl_example.py
@@ -248,7 +248,7 @@ if __name__ == "__main__":
         )
     if args.enable_profiling:
         # If profiling is enabled, we force model to generate to max_new_tokens
-        inference_model_config.update(
+        generate_config.update(
             dict(
                 max_tokens=args.max_new_tokens,  # max new tokens
                 ignore_eos=True,
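
Illustrative note (not part of the patch): as the names suggest, generate_config is presumably the dict that carries sampling options such as max_tokens and ignore_eos to the generation backend, while inference_model_config presumably configures how the model is loaded, so a profiling override placed in the latter would never affect generation length. The sketch below is a minimal, self-contained approximation of that logic under these assumptions; the placeholder dict contents and argparse defaults are invented for illustration, and only generate_config, args.enable_profiling, args.max_new_tokens, max_tokens, and ignore_eos come from the patch itself.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--max-new-tokens", type=int, default=512)
parser.add_argument("--enable-profiling", action="store_true")
# Simulate running with profiling enabled and a 256-token budget.
args = parser.parse_args(["--enable-profiling", "--max-new-tokens", "256"])

# Placeholder configs standing in for the two dicts in rl_example.py.
inference_model_config = {"max_model_len": 4096}      # how the model is set up
generate_config = {"temperature": 1.0, "top_p": 1.0}  # how sampling is done

if args.enable_profiling:
    # Force every rollout to run to max_new_tokens so profiled steps have a
    # fixed generation length; ignore_eos prevents early stopping at EOS.
    generate_config.update(
        dict(
            max_tokens=args.max_new_tokens,  # max new tokens
            ignore_eos=True,
        )
    )

print(generate_config)
# -> {'temperature': 1.0, 'top_p': 1.0, 'max_tokens': 256, 'ignore_eos': True}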