Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-11-01 06:19:48 +00:00
[tutorial] edited hands-on practices (#1899)
* Add handson to ColossalAI.
* Change names of handsons and edit sequence parallel example.
* Edit wrong folder name.
* Resolve conflict.
* Delete readme.
This commit is contained in:
examples/tutorial/opt/opt/colossalai_zero.py | 6 ++++++ (new file)
@@ -0,0 +1,6 @@
+from colossalai.zero.shard_utils import TensorShardStrategy
+
+zero = dict(model_config=dict(shard_strategy=TensorShardStrategy(),
+                              tensor_placement_policy="auto",
+                              reuse_fp16_shard=True),
+            optimizer_config=dict(gpu_margin_mem_ratio=0.8, initial_scale=16384))
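For context, a config file like colossalai_zero.py above is not imported directly by the training script; it is passed to ColossalAI's config-based launcher, which parses it into the global context. The following is a minimal sketch, assuming the 0.1.x-era launch_from_torch API and a torchrun invocation; the script name and the model/optimizer wiring are illustrative, not part of this commit.

# Minimal sketch (assumption): loading a ZeRO config file such as colossalai_zero.py
# in a ColossalAI 0.1.x-era tutorial script. Run with, e.g.:
#   torchrun --nproc_per_node 1 train_sketch.py --config colossalai_zero.py
import colossalai
from colossalai.core import global_context as gpc

def main():
    # Default parser provides --config plus the usual launch arguments.
    args = colossalai.get_default_parser().parse_args()

    # Initialize the distributed environment from torchrun-provided env vars;
    # the config file is parsed and exposed as gpc.config.
    colossalai.launch_from_torch(config=args.config)

    # The zero dict defined in the config file is now available here and is
    # what the tutorial's model/optimizer setup reads from.
    print(gpc.config.zero)

if __name__ == '__main__':
    main()

In this pattern the config file stays plain Python, so settings such as tensor_placement_policy or gpu_margin_mem_ratio can be edited without touching the training script.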