From 08a1244ef1c8b53731637fe911f22b7f934cfc28 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 6 Aug 2025 06:16:37 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 applications/ColossalChat/coati/distributed/grpo_consumer.py | 2 +-
 colossalai/shardformer/modeling/qwen3.py                      | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/applications/ColossalChat/coati/distributed/grpo_consumer.py b/applications/ColossalChat/coati/distributed/grpo_consumer.py
index a2c3e03d6..424d46098 100644
--- a/applications/ColossalChat/coati/distributed/grpo_consumer.py
+++ b/applications/ColossalChat/coati/distributed/grpo_consumer.py
@@ -530,4 +530,4 @@ class GRPOConsumer(BaseConsumer):
         model = self.policy_model.unwrap()
         state_dict = model.state_dict()
         state_dict["consumer_global_step"] = torch.tensor([self.global_step], device=self.device)
-        return state_dict
\ No newline at end of file
+        return state_dict
diff --git a/colossalai/shardformer/modeling/qwen3.py b/colossalai/shardformer/modeling/qwen3.py
index 437693800..5f96f5f49 100644
--- a/colossalai/shardformer/modeling/qwen3.py
+++ b/colossalai/shardformer/modeling/qwen3.py
@@ -273,7 +273,7 @@ class Qwen3PipelineForwards:
         hidden_states: Optional[torch.FloatTensor] = None,
         stage_index: Optional[List[int]] = None,
         shard_config: ShardConfig = None,
-        **kwargs
+        **kwargs,
     ):
         r"""
         Args: