[Shardformer] add assert that the number of attention heads is divisible by tp_size (#5670)

* add an assert that the number of attention heads is divisible by the tensor-parallel size (tp_size)

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
This commit is contained in:
Wang Binluo
2024-04-29 05:47:47 -05:00
committed by GitHub
parent 6af6d6fc9f
commit d3f34ee8cc
13 changed files with 48 additions and 0 deletions

View File

@@ -78,6 +78,9 @@ class WhisperPolicy(Policy):
warnings.warn("Whisper doesn't support jit fused operator now, will ignore the jit fused operator flag.")
if self.shard_config.enable_tensor_parallelism:
assert (
self.model.config.encoder_attention_heads % self.shard_config.tensor_parallel_size == 0
), f"The number of attention heads must be divisible by tensor parallel size."
policy[WhisperEncoderLayer] = ModulePolicyDescription(
attribute_replacement={
"self_attn.embed_dim": self.model.config.d_model // self.shard_config.tensor_parallel_size,