Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-05 19:13:01 +00:00
[pipeline] support fp32 for HybridPlugin/merge shardformer test and pipeline test into one file (#4354)
* add naive optimizer for 3DPlugin; refactor the gpt2 shardformer test
* merge tests of PP/DP/TP combinations into one test file
* fix a bug when syncing gradients for DP in HybridPlugin
* update supported precisions for 3DPlugin; fix a bug when shifting tp_degree
* improve the passing of lazy_init
* modify lazy_init; use sync_shared_params
Committed by: Hongxin Liu
Parent: f13954cd58
Commit: 0ceec8f9a9
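To make the headline change concrete, below is a minimal sketch of boosting a model with the hybrid plugin in full fp32, the precision this commit adds support for. The constructor arguments (tp_size, pp_size, precision) and the launch/boost calls reflect my reading of the Booster API at this point in the codebase and should be treated as assumptions; the tiny linear model is a hypothetical stand-in.

import torch
import torch.nn as nn

import colossalai
from colossalai.booster import Booster
from colossalai.booster.plugin import HybridParallelPlugin

# Launch the distributed environment (run under torchrun); the config dict
# matches the launcher signature of this era of the codebase (assumption).
colossalai.launch_from_torch(config={})

model = nn.Linear(16, 16)  # hypothetical stand-in for a real model
optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)

# precision="fp32" is the newly supported mode: the plugin wraps the
# optimizer naively instead of enabling fp16/bf16 mixed-precision training.
plugin = HybridParallelPlugin(tp_size=2, pp_size=1, precision="fp32")
booster = Booster(plugin=plugin)
model, optimizer, *_ = booster.boost(model, optimizer)

With tp_size=2 this sketch expects two ranks (e.g. torchrun --nproc_per_node=2); the naive-optimizer wrapping in fp32 mode is the first bullet of the commit message.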
@@ -16,6 +16,11 @@ from .sharding_spec import ShardingSpec
 layout_converter = LayoutConverter()
 
 
+def clear_layout_converter():
+    global layout_converter
+    layout_converter.cached_solution.clear()
+
+
 def is_distributed_tensor(tensor: torch.Tensor) -> bool:
     """
     Check whether the given tensor is a distributed tensor.
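Grounded in the hunk above, a short sketch of why a module-level clear_layout_converter() helps the merged test file: LayoutConverter caches layout-conversion solutions in a process-global instance, so each parallel configuration in a parameterized test should start from an empty cache. The import path and the test body are assumptions for illustration.

from colossalai.tensor.d_tensor.api import clear_layout_converter  # assumed path


def run_case(tp_size: int, pp_size: int) -> None:
    # Hypothetical body: build the model, shard it with the given tensor- and
    # pipeline-parallel degrees, run forward/backward, and compare outputs.
    ...


def test_all_parallel_configs() -> None:
    for tp_size, pp_size in [(1, 1), (2, 1), (1, 2), (2, 2)]:
        run_case(tp_size, pp_size)
        # Reset the global cache so solutions computed for one (tp, pp)
        # configuration cannot leak into the next.
        clear_layout_converter()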