Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-16 14:41:53 +00:00
[shardformer]: support gpt-j, falcon, Mistral and add interleaved pipeline for bert (#5088)
* [shardformer] implement policy for all GPT-J models and test
* [shardformer] support interleaved pipeline parallel for bert finetune
* [shardformer] shardformer support falcon (#4883)
* [shardformer]: fix interleaved pipeline for bert model (#5048)
* [hotfix]: disable seq parallel for gptj and falcon, and polish code (#5093)
* Add Mistral support for Shardformer (#5103)
* [shardformer] add tests to mistral (#5105)

---------

Co-authored-by: Pengtai Xu <henryxu880@gmail.com>
Co-authored-by: ppt0011 <143150326+ppt0011@users.noreply.github.com>
Co-authored-by: flybird11111 <1829166702@qq.com>
Co-authored-by: eric8607242 <e0928021388@gmail.com>
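For context, a minimal sketch of how the newly supported models are driven through the Shardformer entry point. This is an assumption-laden illustration, not code from this commit: the ShardConfig/ShardFormer names follow the shardformer package's documented interface, and the Mistral checkpoint id and config flag are placeholders.

# Hedged sketch, not taken from this commit: sharding one of the newly
# supported models through the ShardFormer interface. Assumes a
# distributed context (e.g. colossalai.launch) has already initialized
# the process groups; the checkpoint name is illustrative only.
from transformers import AutoModelForCausalLM

from colossalai.shardformer import ShardConfig, ShardFormer

model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1")

# enable_tensor_parallelism is assumed here; check the ShardConfig
# definition in the repository for the actual fields and defaults.
shard_config = ShardConfig(enable_tensor_parallelism=True)
shard_former = ShardFormer(shard_config=shard_config)

# optimize() applies the model-specific policy (this PR adds GPT-J,
# Falcon and Mistral policies) and returns the sharded model plus any
# parameters shared across shards.
sharded_model, shared_params = shard_former.optimize(model)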
@@ -1,11 +1,4 @@
-from .gemini import (
-    ColoInitContext,
-    GeminiAdamOptimizer,
-    GeminiDDP,
-    GeminiOptimizer,
-    get_static_torch_model,
-    post_process_colo_init_ctx,
-)
+from .gemini import GeminiAdamOptimizer, GeminiDDP, GeminiOptimizer, get_static_torch_model
 from .low_level import LowLevelZeroOptimizer
 from .wrapper import zero_model_wrapper, zero_optim_wrapper
 
@@ -16,7 +9,5 @@ __all__ = [
     "zero_model_wrapper",
     "zero_optim_wrapper",
     "LowLevelZeroOptimizer",
-    "ColoInitContext",
-    "post_process_colo_init_ctx",
     "get_static_torch_model",
 ]
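Taken together, the hunks drop ColoInitContext and post_process_colo_init_ctx from both the import and __all__, and collapse the remaining Gemini import onto one line. A sketch of the import surface that remains valid after this change, assuming the file is colossalai/zero's package __init__, which the relative imports suggest:

# Everything still exported after this commit; ColoInitContext and
# post_process_colo_init_ctx are no longer importable from here.
from colossalai.zero import (
    GeminiAdamOptimizer,
    GeminiDDP,
    GeminiOptimizer,
    LowLevelZeroOptimizer,
    get_static_torch_model,
    zero_model_wrapper,
    zero_optim_wrapper,
)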