[FAW] move coloparam setting in test code. (#1429)

Author: Jiarui Fang
Date: 2022-08-10 14:31:53 +08:00
Committed by: GitHub
Parent: cb98cf5558
Commit: 10b3df65c8

2 changed files with 6 additions and 6 deletions


@@ -8,11 +8,9 @@ import random
 import colossalai
 from colossalai.utils import free_port
 from colossalai.testing import rerun_if_address_is_in_use
-from colossalai.tensor import ColoParameter
+from colossalai.tensor import ColoParameter, ProcessGroup, ShardSpec, ComputePattern, ComputeSpec
 from colossalai.nn._ops.cache_embedding import CachedParamMgr, FreqAwareEmbeddingBag, ParallelFreqAwareEmbeddingBag
-from colossalai.nn._ops.cache_embedding import CachedParamMgr, FreqAwareEmbeddingBag
 
 NUM_EMBED, EMBED_DIM = 10, 8
 BATCH_SIZE = 8
@@ -161,6 +159,11 @@ def run_parallel_freq_aware_embed(rank, world_size):
     weight = torch.rand(num_embed, embed_dim)
     coloweight = ColoParameter(weight.clone().detach().cpu(), requires_grad=False)
+    # initialize the tensor spec for the embedding weight parameter,
+    # which is a ColoParameter.
+    coloweight.process_group = ProcessGroup(tp_degree=world_size)
+    coloweight.set_tensor_spec(ShardSpec(dims=[-1], num_partitions=[world_size]), ComputeSpec(ComputePattern.TP1D))
     model = ParallelFreqAwareEmbeddingBag.from_pretrained(coloweight,
                                                           include_last_offset=True,
                                                           freeze=False,
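
Pulled out of the diff, the pattern this hunk establishes reads as follows: the test (the caller) builds the ColoParameter, attaches the tensor-parallel spec itself, and only then hands the weight to ParallelFreqAwareEmbeddingBag.from_pretrained. The sketch below is minimal and assumes the ColossalAI APIs imported above; the helper name build_sharded_embedding is hypothetical, and only the keyword arguments visible in the truncated from_pretrained call are shown.

import torch
from colossalai.tensor import ColoParameter, ProcessGroup, ShardSpec, ComputePattern, ComputeSpec
from colossalai.nn._ops.cache_embedding import ParallelFreqAwareEmbeddingBag

def build_sharded_embedding(num_embed, embed_dim, world_size):
    # Hypothetical helper mirroring run_parallel_freq_aware_embed above.
    weight = torch.rand(num_embed, embed_dim)
    coloweight = ColoParameter(weight.clone().detach().cpu(), requires_grad=False)
    # The caller, not from_pretrained, sets the distribution spec:
    # shard the last weight dimension across a 1D tensor-parallel group.
    coloweight.process_group = ProcessGroup(tp_degree=world_size)
    coloweight.set_tensor_spec(ShardSpec(dims=[-1], num_partitions=[world_size]),
                               ComputeSpec(ComputePattern.TP1D))
    return ParallelFreqAwareEmbeddingBag.from_pretrained(coloweight,
                                                         include_last_offset=True,
                                                         freeze=False)

Moving the spec setting into the test keeps from_pretrained agnostic about how the weight is distributed: the caller decides the process group and shard layout before constructing the module.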