mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-07-29 14:30:40 +00:00
[tutorial] updated auto parallel demo with latest data path (#1917)
This commit is contained in:
parent
d53415bc10
commit
acd9abc5ca
@ -2,7 +2,8 @@
|
|||||||
|
|
||||||
## Prepare Dataset
|
## Prepare Dataset
|
||||||
|
|
||||||
We use CIFAR10 dataset in this example. The dataset will be downloaded to `./data` by default.
|
We use CIFAR10 dataset in this example. You should invoke the `download_cifar10.py` in the tutorial root directory or directly run the `auto_parallel_with_resnet.py`.
|
||||||
|
The dataset will be downloaded to `colossalai/examples/tutorials/data` by default.
|
||||||
If you wish to use a customized directory for the dataset, you can set the environment variable `DATA` via the following command.
|
If you wish to use a customized directory for the dataset, you can set the environment variable `DATA` via the following command.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@ -13,7 +14,7 @@ export DATA=/path/to/data
|
|||||||
## Run on 2*2 device mesh
|
## Run on 2*2 device mesh
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
colossalai run --nproc_per_node 4 auto_parallel_demo.py
|
colossalai run --nproc_per_node 4 auto_parallel_with_resnet.py
|
||||||
```
|
```
|
||||||
|
|
||||||
## Auto Checkpoint Benchmarking
|
## Auto Checkpoint Benchmarking
|
||||||
|
@ -24,7 +24,7 @@ from colossalai.logging import get_dist_logger
|
|||||||
from colossalai.nn.lr_scheduler import CosineAnnealingLR
|
from colossalai.nn.lr_scheduler import CosineAnnealingLR
|
||||||
from colossalai.utils import get_dataloader
|
from colossalai.utils import get_dataloader
|
||||||
|
|
||||||
DATA_ROOT = Path(os.environ.get('DATA', './data'))
|
DATA_ROOT = Path(os.environ.get('DATA', '../data')).absolute()
|
||||||
BATCH_SIZE = 1024
|
BATCH_SIZE = 1024
|
||||||
NUM_EPOCHS = 10
|
NUM_EPOCHS = 10
|
||||||
|
|
Loading…
Reference in New Issue
Block a user