[example] update llama example (#5626)

* [plugin] support dp inside for hybrid parallel

* [example] update llama benchmark

* [example] update llama readme

Hongxin Liu
2024-04-23 14:12:20 +08:00
committed by GitHub
parent 862fbaaa62
commit 4de4e31818
8 changed files with 72 additions and 783 deletions

@@ -424,6 +424,7 @@ class GeminiPlugin(DPPluginBase):
         )
         self.extra_dp_group = self.pg_mesh.get_group_along_axis(DP_AXIS) if self.extra_dp_size > 1 else None
         self.tp_group = self.pg_mesh.get_group_along_axis(TP_AXIS) if self.tp_size > 1 else None
+        self.dp_size = self.zero_size * self.extra_dp_size
         self.shard_config = ShardConfig(
             tensor_parallel_process_group=self.tp_group,
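
For context, the added line reports the plugin's effective data-parallel size as the product of the ZeRO shard degree and the extra (outer) data-parallel replication degree. A minimal sketch of that composition, using hypothetical sizes rather than ColossalAI's actual mesh API:

# Minimal sketch (hypothetical sizes, not ColossalAI's real mesh API):
# with an extra DP axis nested around ZeRO, the effective data-parallel
# size is the product of both axes, matching the dp_size line above.
world_size = 16                                      # assumed total ranks
tp_size = 2                                          # tensor-parallel degree
extra_dp_size = 2                                    # outer DP replication
zero_size = world_size // (tp_size * extra_dp_size)  # ZeRO shard degree: 4
dp_size = zero_size * extra_dp_size                  # as in the diff: 4 * 2 = 8
assert tp_size * dp_size == world_size               # mesh axes cover all ranks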