From 3f2be80530ad4ba332b0f375d0493c46fcebe298 Mon Sep 17 00:00:00 2001
From: flybird11111 <1829166702@qq.com>
Date: Mon, 3 Jun 2024 11:25:18 +0800
Subject: [PATCH] fix (#5765)

---
 colossalai/shardformer/policies/llama.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/colossalai/shardformer/policies/llama.py b/colossalai/shardformer/policies/llama.py
index 713175c6c..a9c982231 100644
--- a/colossalai/shardformer/policies/llama.py
+++ b/colossalai/shardformer/policies/llama.py
@@ -351,7 +351,7 @@ class LlamaForCausalLMPolicy(LlamaPolicy):
 
         policy = super().module_policy()
 
-        if self.shard_config.enable_tensor_parallelism and not self.shard_config.enable_sequence_parallelism:
+        if self.shard_config.enable_tensor_parallelism:
             # add a new item for casual lm
             new_item = {
                 LlamaForCausalLM: ModulePolicyDescription(
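
Note (illustration only, not part of the patch): the sketch below is a minimal, self-contained Python stand-in for the gating logic changed above. `ShardConfig` here is a simplified placeholder, not ColossalAI's actual class. It shows that before the fix the causal-LM policy item was skipped whenever sequence parallelism was enabled alongside tensor parallelism, while after the fix the item is registered whenever tensor parallelism is on.

# Hypothetical stand-in for the shard config, for illustration only.
from dataclasses import dataclass


@dataclass
class ShardConfig:
    enable_tensor_parallelism: bool = False
    enable_sequence_parallelism: bool = False


def adds_causal_lm_item_old(cfg: ShardConfig) -> bool:
    # Pre-fix condition: the causal-LM policy item was skipped when
    # sequence parallelism was also enabled.
    return cfg.enable_tensor_parallelism and not cfg.enable_sequence_parallelism


def adds_causal_lm_item_new(cfg: ShardConfig) -> bool:
    # Post-fix condition: the item is added whenever tensor parallelism
    # is enabled, regardless of sequence parallelism.
    return cfg.enable_tensor_parallelism


if __name__ == "__main__":
    cfg = ShardConfig(enable_tensor_parallelism=True, enable_sequence_parallelism=True)
    print(adds_causal_lm_item_old(cfg))  # False: item was silently skipped
    print(adds_causal_lm_item_new(cfg))  # True: item is now registered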