Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-21 01:24:04 +00:00)
[legacy] move trainer to legacy (#4545)
* [legacy] move trainer to legacy
* [doc] update docs related to trainer
* [test] ignore legacy test
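In effect, the change is an import-path move: code that previously imported the trainer from `colossalai.trainer` now imports it from `colossalai.legacy.trainer`. Below is a minimal sketch of updated user code, assuming the `Trainer`/`hooks` API and the `colossalai.initialize` flow are otherwise unchanged; the model, optimizer, criterion and dataloader placeholders are hypothetical.

```python
import colossalai
from colossalai.logging import get_dist_logger
# before this commit: from colossalai.trainer import Trainer, hooks
from colossalai.legacy.trainer import Trainer, hooks

# build components and initialize with colossalai.initialize
# (model, optimizer, criterion and the dataloaders are hypothetical placeholders)
model, optimizer, criterion = ..., ..., ...
engine, train_dataloader, test_dataloader, _ = colossalai.initialize(
    model=model,
    optimizer=optimizer,
    criterion=criterion,
    train_dataloader=...,
    test_dataloader=...,
)

logger = get_dist_logger()
trainer = Trainer(engine=engine, logger=logger)  # keyword names assumed from the pre-move API
```

The documentation hunks below record exactly this one-line import change.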
@@ -61,7 +61,7 @@ The default value of the Trainer argument `schedule` is `None`. In most cases, unless
 ```python
 from colossalai.logging import get_dist_logger
-from colossalai.trainer import Trainer, hooks
+from colossalai.legacy.trainer import Trainer, hooks

 # build components and initialize with colossalai.initialize
 ...
@@ -104,7 +104,7 @@ trainer.fit(
 ```python
 from colossalai.logging import get_dist_logger
-from colossalai.trainer import hooks
+from colossalai.legacy.trainer import hooks

 class LogMessageHook(hooks.BaseHook):
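The hunk above touches the doc's custom-hook example. As a hedged sketch (not taken from this diff), a user-defined hook subclasses `hooks.BaseHook` and overrides the callbacks it needs; the `priority` argument and the `before_train`/`after_train` callback names below are assumptions carried over from the pre-move trainer API.

```python
from colossalai.logging import get_dist_logger
from colossalai.legacy.trainer import hooks


class LogMessageHook(hooks.BaseHook):
    """Logs a message at the start and end of training (sketch)."""

    def __init__(self, priority=10):
        super().__init__(priority)          # priority orders hook execution (assumed)
        self._logger = get_dist_logger()

    def before_train(self, trainer):
        # runs once before the training loop starts
        self._logger.info('training starts')

    def after_train(self, trainer):
        # runs once after the training loop finishes
        self._logger.info('training finished')
```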
@@ -341,7 +341,7 @@ for epoch in range(gpc.config.NUM_EPOCHS):
 ```python
 from colossalai.nn.metric import Accuracy
-from colossalai.trainer import Trainer, hooks
+from colossalai.legacy.trainer import Trainer, hooks


 # create a trainer object
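In that section of the doc, the trainer built from these imports is paired with a list of hooks and then driven by `trainer.fit`. The following is a hedged sketch of that pattern; the specific hook classes (`LossHook`, `AccuracyHook`, `LogMetricByEpochHook`) and the `trainer.fit` keyword names are assumptions based on the pre-move trainer API, not something this diff shows.

```python
# engine, logger and the dataloaders are assumed to come from the doc's earlier
# colossalai.initialize setup; gpc is the global context object used in the doc
trainer = Trainer(engine=engine, logger=logger)

# hook classes and keyword names below are assumed from the pre-move trainer API
hook_list = [
    hooks.LossHook(),
    hooks.AccuracyHook(accuracy_func=Accuracy()),
    hooks.LogMetricByEpochHook(logger),
]

trainer.fit(
    train_dataloader=train_dataloader,
    epochs=gpc.config.NUM_EPOCHS,
    test_dataloader=test_dataloader,
    test_interval=1,
    hooks=hook_list,
    display_progress=True,
)
```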
@@ -384,3 +384,4 @@ python -m torch.distributed.launch --nproc_per_node <num_gpus> --master_addr loc
 # with trainer
 python -m torch.distributed.launch --nproc_per_node <num_gpus> --master_addr localhost --master_port 29500 run_resnet_cifar10_with_trainer.py
 ```
+<!-- doc-test-command: echo -->
@@ -41,7 +41,7 @@ for epoch in range(num_epochs):

 #### Save with trainer
 ```python
-from colossalai.trainer import Trainer, hooks
+from colossalai.legacy.trainer import Trainer, hooks
 model = ...
 engine, _, _, _ = colossalai.initialize(model=model, ...)
 trainer = Trainer(engine, ...)
@@ -61,3 +61,4 @@ model = ...
 load_checkpoint('xxx.pt', model)
 ... # train or test
 ```
+<!-- doc-test-command: echo -->
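For context, the edited section covers saving and loading checkpoints around the trainer. Here is a hedged sketch of that flow under the new import path; `hooks.SaveCheckpointHook` and its keyword arguments, as well as the `load_checkpoint` import location, are assumptions based on the pre-move API rather than something this diff confirms.

```python
import colossalai
from colossalai.legacy.trainer import Trainer, hooks
from colossalai.utils import load_checkpoint   # import location assumed

model = ...
engine, train_dataloader, _, _ = colossalai.initialize(
    model=model, optimizer=..., criterion=..., train_dataloader=...,
)
trainer = Trainer(engine=engine)

# assumed hook: save a checkpoint every epoch while trainer.fit runs
save_hook = hooks.SaveCheckpointHook(interval=1, checkpoint_dir='./ckpt', model=model)
trainer.fit(train_dataloader=train_dataloader, epochs=..., hooks=[save_hook])

# later: restore the saved weights before training or testing again
model = ...
load_checkpoint('xxx.pt', model)
```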