Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-08 04:24:47 +00:00
[legacy] move communication and nn to legacy and refactor logger (#4671)
* [legacy] move communication to legacy (#4640)
* [legacy] refactor logger and clean up legacy codes (#4654)
* [legacy] make logger independent of gpc
* [legacy] make optim independent of registry
* [legacy] move test engine to legacy
* [legacy] move nn to legacy (#4656)
* [legacy] move nn to legacy
* [checkpointio] fix save hf config
* [test] remove useless rpc pp test
* [legacy] fix nn init
* [example] skip tutorial hybrid parallel example
* [devops] test doc check
* [devops] test doc check
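One of the changes above makes the logger independent of the global parallel context (gpc). As a rough illustration only (the structure below is an assumption for this note, not ColossalAI's actual implementation), a gpc-independent distributed logger can resolve its rank directly from torch.distributed:

    import logging

    import torch.distributed as dist


    def get_dist_logger(name: str = "colossalai") -> logging.Logger:
        # Hypothetical sketch: take the rank from torch.distributed instead of
        # a global parallel context (gpc), removing the dependency on gpc.
        rank = dist.get_rank() if dist.is_available() and dist.is_initialized() else 0
        logger = logging.getLogger(name)
        if not logger.handlers:
            handler = logging.StreamHandler()
            handler.setFormatter(logging.Formatter(f"[rank {rank}] %(levelname)s %(message)s"))
            logger.addHandler(handler)
            logger.setLevel(logging.INFO)
        return logger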
colossalai/legacy/nn/layer/utils/__init__.py (new file, 15 lines added)
@@ -0,0 +1,15 @@
+from .common import (
+    ACT2FN,
+    CheckpointModule,
+    _ntuple,
+    divide,
+    get_tensor_parallel_mode,
+    set_tensor_parallel_attribute_by_partition,
+    set_tensor_parallel_attribute_by_size,
+    to_2tuple,
+)
+
+__all__ = [
+    'CheckpointModule', 'divide', 'ACT2FN', 'set_tensor_parallel_attribute_by_size',
+    'set_tensor_parallel_attribute_by_partition', 'get_tensor_parallel_mode', '_ntuple', 'to_2tuple'
+]
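Since these layer utilities now live under the legacy package, downstream code would presumably import them from the new path. A minimal usage sketch follows; the behavior of divide and to_2tuple is assumed from their conventional meaning in tensor-parallel code, not taken from this diff:

    from colossalai.legacy.nn.layer.utils import divide, to_2tuple

    # divide is conventionally an exact integer division helper used when
    # partitioning a dimension across tensor-parallel ranks; to_2tuple
    # expands a scalar into a 2-tuple.
    hidden_per_partition = divide(4096, 8)   # assumed to return 512
    kernel_size = to_2tuple(3)               # assumed to return (3, 3)
    print(hidden_per_partition, kernel_size)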