mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-05-30 19:05:26 +00:00
* [shardformer] implement policy for all GPT-J models and test * [shardformer] support interleaved pipeline parallel for bert finetune * [shardformer] shardformer support falcon (#4883) * [shardformer]: fix interleaved pipeline for bert model (#5048) * [hotfix]: disable seq parallel for gptj and falcon, and polish code (#5093) * Add Mistral support for Shardformer (#5103) * [shardformer] add tests to mistral (#5105) --------- Co-authored-by: Pengtai Xu <henryxu880@gmail.com> Co-authored-by: ppt0011 <143150326+ppt0011@users.noreply.github.com> Co-authored-by: flybird11111 <1829166702@qq.com> Co-authored-by: eric8607242 <e0928021388@gmail.com>
18 lines
619 B
Python
18 lines
619 B
Python
# Package public API: re-export the init context, op hooks, stateful-tensor
# containers, and tensor placement policies from the sibling modules.
# (Scrape-artifact "|" lines removed — they were not part of the source.)
from .colo_init_context import ColoInitContext, post_process_colo_init_ctx
from .ophooks import BaseOpHook, register_ophooks_recursively
from .stateful_tensor import StatefulTensor
from .stateful_tensor_mgr import StatefulTensorMgr
from .tensor_placement_policy import AutoTensorPlacementPolicy, CPUTensorPlacementPolicy, CUDATensorPlacementPolicy

# Explicit public interface — every name imported above is re-exported.
__all__ = [
    "StatefulTensorMgr",
    "StatefulTensor",
    "CPUTensorPlacementPolicy",
    "CUDATensorPlacementPolicy",
    "AutoTensorPlacementPolicy",
    "register_ophooks_recursively",
    "BaseOpHook",
    "ColoInitContext",
    "post_process_colo_init_ctx",
]