Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-08-14 14:13:22 +00:00)
fix

commit de4f7a1d25 (parent 5c56a7fd7b)
@@ -3,10 +3,11 @@ from torch.optim import Adam
 
 import colossalai
 from colossalai.booster.mixed_precision import FP16TorchMixedPrecision
-from colossalai.testing import rerun_if_address_is_in_use, spawn
+from colossalai.testing import rerun_if_address_is_in_use, spawn, clear_cache_before_run
 from tests.kit.model_zoo import model_zoo
 
 
+@clear_cache_before_run()
 def run_torch_amp(rank, world_size, port):
     # init dist env
     colossalai.launch(rank=rank, world_size=world_size, port=port, host="localhost")
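
For context, clear_cache_before_run() is a colossalai.testing decorator that clears cached accelerator memory before the wrapped function runs, so the AMP test worker starts from a clean GPU state. Below is a minimal sketch of how a decorated worker like this is typically driven in ColossalAI's test suite; the test name test_torch_amp and the world size of 1 are assumptions for illustration, not part of this commit.

    from colossalai.testing import rerun_if_address_is_in_use, spawn


    @rerun_if_address_is_in_use()
    def test_torch_amp():
        # Hypothetical driver: spawn launches run_torch_amp on one process and
        # supplies the (rank, world_size, port) arguments the worker expects.
        spawn(run_torch_amp, 1)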