mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-13 13:11:05 +00:00
[NFC] polish colossalai/context/process_group_initializer/initializer_sequence.py colossalai/context/process_group_initializer/initializer_tensor.py code style (#639)
Co-authored-by: 何晓昕 <cautious@r-236-100-25-172.comp.nus.edu.sg>
This commit is contained in:
@@ -69,8 +69,8 @@ class Initializer_Sequence(ProcessGroupInitializer):
|
||||
pipeline_parallel_size (int): Size of pipeline parallel.
|
||||
tensor_parallel_size (int): Size of tensor parallel.
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
    """Initialize process groups for sequence parallelism.

    All positional and keyword arguments are forwarded unchanged to
    ``ProcessGroupInitializer.__init__``.
    """
    super().__init__(*args, **kwargs)
    # Sequence parallelism partitions ranks the same way tensor
    # parallelism does, so the tensor initializer is reused here.
    self._sequence_initializer = Initializer_Tensor(*args, **kwargs)
|
@@ -20,6 +20,7 @@ class Initializer_Tensor(ProcessGroupInitializer):
|
||||
pipeline_parallel_size (int): Size of pipeline parallel.
|
||||
tensor_parallel_size (int): Size of tensor parallel.
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
    """Initialize process groups for tensor parallelism.

    All positional and keyword arguments are forwarded unchanged to
    ``ProcessGroupInitializer.__init__``.
    """
    super().__init__(*args, **kwargs)
    # Number of tensor-parallel groups = world size divided by the size of
    # each group. NOTE(review): integer division assumes world_size is a
    # multiple of tensor_parallel_size — presumably validated upstream.
    world, tp_size = self.world_size, self.tensor_parallel_size
    self.num_tensor_parallel_group = world // tp_size
|
Reference in New Issue
Block a user