Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-30 05:55:56 +00:00
[example] integrate autoparallel demo with CI (#2466)
* [example] integrate autoparallel demo with CI
* polish code
* polish code
* polish code
* polish code
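The hunks below drop the CIFAR10/titans data pipeline so the demo can run in CI without downloading a dataset; batches come from a synthesize_data() helper instead. The helper's body is outside this excerpt, so the following is only a plausible sketch, assuming CIFAR10-shaped random tensors (the batch size and default values are illustrative guesses, not taken from this commit):

import torch


def synthesize_data(batch_size: int = 8):
    # Guess at the helper the diff references: a random batch shaped like
    # CIFAR10 (3x32x32 RGB images, 10 classes), so CI needs no dataset download.
    img = torch.rand(batch_size, 3, 32, 32)
    label = torch.randint(low=0, high=10, size=(batch_size,))
    return img, label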
@@ -1,11 +1,4 @@
-import argparse
-import os
-from pathlib import Path
-
 import torch
-from titans.utils import barrier_context
-from torchvision import transforms
-from torchvision.datasets import CIFAR10
 from torchvision.models import resnet50
 from tqdm import tqdm
 
@@ -14,9 +7,6 @@ from colossalai.auto_parallel.tensor_shard.initialize import autoparallelize
 from colossalai.core import global_context as gpc
 from colossalai.logging import get_dist_logger
 from colossalai.nn.lr_scheduler import CosineAnnealingLR
-from colossalai.utils import get_dataloader
-
-DATA_ROOT = Path(os.environ.get('DATA', '../data')).absolute()
 
 
 def synthesize_data():
@@ -48,9 +38,8 @@ def main():
     model.train()
 
     # if we use synthetic data
-    # we assume it only has 30 steps per epoch
-    num_steps = range(30)
-
+    # we assume it only has 10 steps per epoch
+    num_steps = range(10)
     progress = tqdm(num_steps)
 
     for _ in progress:
@@ -73,8 +62,7 @@ def main():
 
     # if we use synthetic data
     # we assume it only has 10 steps for evaluation
-    num_steps = range(30)
-
+    num_steps = range(10)
     progress = tqdm(num_steps)
 
     for _ in progress:
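Read together, the resulting CI-friendly loop is short enough to restate. In the sketch below, model.train(), the fixed num_steps = range(10), the tqdm progress bar, and the bare for _ in progress: come verbatim from the hunks above; the model, loss, and optimizer are assumed placeholders standing in for whatever the full script sets up (including the autoparallelize step named in the hunk context):

import torch
from torch import nn
from torchvision.models import resnet50
from tqdm import tqdm


def synthesize_data(batch_size: int = 8):
    # Same illustrative helper as sketched after the commit message.
    return torch.rand(batch_size, 3, 32, 32), torch.randint(0, 10, (batch_size,))


model = resnet50()                                        # the demo's model family
criterion = nn.CrossEntropyLoss()                         # assumed placeholder
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)   # assumed placeholder

model.train()

# if we use synthetic data
# we assume it only has 10 steps per epoch
num_steps = range(10)
progress = tqdm(num_steps)

for _ in progress:
    img, label = synthesize_data()
    optimizer.zero_grad()
    loss = criterion(model(img), label)
    loss.backward()
    optimizer.step()
    progress.set_postfix(loss=loss.item())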