[Shardformer] add assert for num of attention heads divisible by tp_size (#5670)
* add assert for num of attention heads divisible by tp_size

* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
@@ -78,6 +78,9 @@ class WhisperPolicy(Policy):
             warnings.warn("Whisper doesn't support jit fused operator now, will ignore the jit fused operator flag.")

         if self.shard_config.enable_tensor_parallelism:
+            assert (
+                self.model.config.encoder_attention_heads % self.shard_config.tensor_parallel_size == 0
+            ), f"The number of attention heads must be divisible by tensor parallel size."
             policy[WhisperEncoderLayer] = ModulePolicyDescription(
                 attribute_replacement={
                     "self_attn.embed_dim": self.model.config.d_model // self.shard_config.tensor_parallel_size,
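For context on why the assert is needed: tensor parallelism splits the attention heads evenly across ranks, and each rank's local embedding width shrinks proportionally (the attribute_replacement of self_attn.embed_dim to d_model // tensor_parallel_size above). If the head count is not divisible by the tensor parallel size, no even split exists. The following is a minimal standalone sketch of that invariant, not ColossalAI's actual sharding code; shard_attention_heads and its parameters are hypothetical names for illustration.

    def shard_attention_heads(num_heads: int, head_dim: int, tp_size: int) -> int:
        """Return the per-rank embed_dim after splitting heads across tp_size ranks.

        Hypothetical helper, not part of ColossalAI: it only illustrates the
        divisibility invariant enforced by the new assert in WhisperPolicy.
        """
        assert num_heads % tp_size == 0, "The number of attention heads must be divisible by tensor parallel size."
        heads_per_rank = num_heads // tp_size
        # Each rank keeps a contiguous slice of heads, so its local embedding
        # width is heads_per_rank * head_dim == (num_heads * head_dim) // tp_size.
        return heads_per_rank * head_dim

    # Whisper-base has 8 encoder attention heads with head_dim 64 (d_model = 512):
    print(shard_attention_heads(num_heads=8, head_dim=64, tp_size=2))  # 256 == 512 // 2
    # tp_size=3 would raise: 8 heads cannot be split evenly across 3 ranks.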