Mirror of https://github.com/hpcaitech/ColossalAI.git
[legacy] clean up legacy code (#4743)
* [legacy] remove outdated codes of pipeline (#4692)
* [legacy] remove cli of benchmark and update optim (#4690)
* [legacy] remove cli of benchmark and update optim
* [doc] fix cli doc test
* [legacy] fix engine clip grad norm
* [legacy] remove outdated colo tensor (#4694)
* [legacy] remove outdated colo tensor
* [test] fix test import
* [legacy] move outdated zero to legacy (#4696)
* [legacy] clean up utils (#4700)
* [legacy] clean up utils
* [example] update examples
* [legacy] clean up amp
* [legacy] fix amp module
* [legacy] clean up gpc (#4742)
* [legacy] clean up context
* [legacy] clean core, constants and global vars
* [legacy] refactor initialize
* [example] fix examples ci
* [example] fix examples ci
* [legacy] fix tests
* [example] fix gpt example
* [example] fix examples ci
* [devops] fix ci installation
* [example] fix examples ci
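The first three hunks below make the same mechanical change: modules that used to sit at the top level of the colossalai package now live under colossalai.legacy. Downstream code that must import cleanly on releases from both sides of this commit can fall back at import time. A minimal sketch covering only the names that appear in these hunks (the version split is inferred from the diff, not from release notes):

# Import shim for the paths moved by this commit; other imports are unchanged.
try:
    # layout after this commit
    from colossalai.legacy.context import ParallelMode
    from colossalai.legacy.core import global_context as gpc
    from colossalai.legacy.tensor import ProcessGroup
    from colossalai.legacy.utils import get_dataloader
except ImportError:
    # layout before this commit
    from colossalai.context import ParallelMode
    from colossalai.core import global_context as gpc
    from colossalai.tensor import ProcessGroup
    from colossalai.utils import get_dataloader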
@@ -2,7 +2,7 @@ try:
     from colossalai.zero.shard_utils import TensorShardStrategy
 except ImportError:
     # colossalai > 0.2.8
-    from colossalai.zero.legacy import TensorShardStrategy
+    from colossalai.legacy.zero import TensorShardStrategy
 
 zero = dict(model_config=dict(shard_strategy=TensorShardStrategy(),
                               tensor_placement_policy="auto",
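This config already guards one rename with a try/except, and after this commit TensorShardStrategy has had three homes in total. A sketch of a shim that tolerates all three (version comments are taken or inferred from the hunk, not from release notes):

# Accept all three historical locations of TensorShardStrategy.
try:
    from colossalai.zero.shard_utils import TensorShardStrategy  # oldest layout
except ImportError:
    try:
        from colossalai.zero.legacy import TensorShardStrategy  # colossalai > 0.2.8
    except ImportError:
        from colossalai.legacy.zero import TensorShardStrategy  # after this commit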
@@ -1,7 +1,7 @@
 import torch.distributed as dist
 
-from colossalai.context import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context import ParallelMode
+from colossalai.legacy.core import global_context as gpc
 
 
 class barrier_context():
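Only the imports change here; the body of barrier_context sits outside the hunk. For orientation, a hypothetical sketch of the executor-goes-first pattern such a context manager typically implements (the parallel mode, constructor argument, and rank logic are assumptions, not shown in the diff):

import torch.distributed as dist

from colossalai.legacy.context import ParallelMode
from colossalai.legacy.core import global_context as gpc


class barrier_context():
    # Let one executor rank run the guarded section first (for example a
    # dataset download) while every other rank waits at a barrier.

    def __init__(self, executor_rank=0):
        self.is_executor = gpc.get_local_rank(ParallelMode.DATA) == executor_rank

    def __enter__(self):
        if not self.is_executor:
            dist.barrier()  # block until the executor rank finishes the section

    def __exit__(self, exc_type, exc_value, traceback):
        if self.is_executor:
            dist.barrier()  # executor arrives, releasing the waiting ranks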
@@ -51,12 +51,13 @@ from transformers import (
 from transformers.utils.versions import require_version
 
 import colossalai
-from colossalai.context import ParallelMode
-from colossalai.core import global_context as gpc
+from colossalai.legacy.context import ParallelMode
+from colossalai.legacy.core import global_context as gpc
+from colossalai.legacy.tensor import ProcessGroup
+from colossalai.legacy.utils import get_dataloader
 from colossalai.logging import disable_existing_loggers, get_dist_logger
 from colossalai.nn.optimizer import HybridAdam
-from colossalai.tensor import ProcessGroup
-from colossalai.utils import get_current_device, get_dataloader
+from colossalai.utils import get_current_device
 from colossalai.zero import GeminiOptimizer
 
 require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/language-modeling/requirements.txt")
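Note the split this hunk makes explicit: ParallelMode, gpc, ProcessGroup and get_dataloader move under colossalai.legacy, while HybridAdam, get_current_device and GeminiOptimizer keep their current locations. A hypothetical usage sketch of the two relocated helpers (the dataset variable and batch size are assumptions, not part of the hunk):

from colossalai.legacy.tensor import ProcessGroup
from colossalai.legacy.utils import get_dataloader

pg = ProcessGroup()  # default process group spanning all launched ranks
train_dataloader = get_dataloader(
    train_dataset,  # a torch Dataset built earlier in the script (assumed)
    shuffle=True,
    batch_size=16,  # assumed value, forwarded to the underlying DataLoader
)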
@@ -1,21 +1,21 @@
 #!/bin/bash
 
 set -xue
-echo "this test is outdated"
-# pip install -r requirements.txt
+
+pip install -r requirements.txt
 
-# BS=4
-# MEMCAP=0
-# GPUNUM=4
-# MODLE="facebook/opt-125m"
+BS=4
+MEMCAP=0
+GPUNUM=4
+MODLE="facebook/opt-125m"
 
-torchrun \
-  --nproc_per_node ${GPUNUM} \
-  --master_port 19198 \
-  run_clm.py \
-  -s \
-  --output_dir $PWD \
-  --mem_cap ${MEMCAP} \
-  --model_name_or_path ${MODLE} \
-  --per_device_train_batch_size ${BS} \
-  --num_train_epochs 1
+# torchrun \
+#   --nproc_per_node ${GPUNUM} \
+#   --master_port 19198 \
+#   run_clm.py \
+#   -s \
+#   --output_dir $PWD \
+#   --mem_cap ${MEMCAP} \
+#   --model_name_or_path ${MODLE} \
+#   --per_device_train_batch_size ${BS} \
+#   --num_train_epochs 1
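After this change the CI script only smoke-tests the example's installation: requirements are installed and the launch variables are defined, but the torchrun invocation stays commented out. A lighter-touch alternative (a sketch of a design option, not what the commit does) is to gate the heavy part behind an opt-in flag so the command never rots in comments; RUN_OUTDATED_TEST is a hypothetical variable:

#!/bin/bash
set -xue

pip install -r requirements.txt

BS=4
MEMCAP=0
GPUNUM=4
MODLE="facebook/opt-125m"

# Run the real test only when explicitly requested.
if [ "${RUN_OUTDATED_TEST:-0}" -eq 1 ]; then
    torchrun \
        --nproc_per_node ${GPUNUM} \
        --master_port 19198 \
        run_clm.py \
        -s \
        --output_dir $PWD \
        --mem_cap ${MEMCAP} \
        --model_name_or_path ${MODLE} \
        --per_device_train_batch_size ${BS} \
        --num_train_epochs 1
fi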