[Shardformer] add assert for num of attention heads divisible by tp_size (#5670)

* add assert for number of attention heads divisible by tp_size

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
This commit is contained in:
Wang Binluo
2024-04-29 05:47:47 -05:00
committed by GitHub
parent 6af6d6fc9f
commit d3f34ee8cc
13 changed files with 48 additions and 0 deletions

View File

@@ -84,6 +84,9 @@ class GPT2Policy(Policy):
self.shard_config.enable_flash_attention = False
use_flash_attention = False
if self.shard_config.enable_tensor_parallelism:
assert (
self.model.config.num_attention_heads % self.shard_config.tensor_parallel_size == 0
), f"The number of attention heads must be divisible by tensor parallel size."
policy[GPT2Model] = ModulePolicyDescription(
sub_module_replacement=[
SubModuleReplacementDescription(