[Tensor] add embedding tp1d row (#904)

This commit is contained in:
Ziyue Jiang
2022-04-29 14:10:05 +08:00
committed by GitHub
parent 16122d5fac
commit f593a5637e
5 changed files with 108 additions and 8 deletions

View File

@@ -5,7 +5,6 @@ from .utils.dummy_data_generator import DummyDataGenerator
from .registry import non_distributed_component_funcs
from colossalai.utils.cuda import get_current_device
class SimpleNet(CheckpointModule):
"""
In this no-leaf module, it has subordinate nn.modules and a nn.Parameter.
@@ -13,12 +12,14 @@ class SimpleNet(CheckpointModule):
def __init__(self, checkpoint=False) -> None:
super().__init__(checkpoint=checkpoint)
self.embed = nn.Embedding(20, 4)
self.proj1 = nn.Linear(4, 8)
self.ln1 = nn.LayerNorm(8)
self.proj2 = nn.Linear(8, 4)
self.ln2 = nn.LayerNorm(4)
def forward(self, x):
x = self.embed(x)
x = self.proj1(x)
x = self.ln1(x)
x = self.proj2(x)
@@ -26,11 +27,12 @@ class SimpleNet(CheckpointModule):
return x
class DummyDataLoader(DummyDataGenerator):
    """Dummy data source for SimpleNet tests.

    Yields integer token-id batches suitable for the model's
    ``nn.Embedding(20, 4)`` input layer, plus random class labels.
    """

    def generate(self):
        # NOTE: the two pre-commit assignments (float `torch.rand(16, 4)` data and
        # `size=(16,)` labels) were dead code left over from the diff — they were
        # immediately overwritten below, so they are removed here.
        #
        # Token ids must lie in [0, 20) to be valid indices for Embedding(20, 4).
        data = torch.randint(low=0, high=20, size=(16, 20), device=get_current_device())
        # Binary labels, one (16, 4) tensor per batch — shape mirrors the model's
        # final projection output; presumably consumed by a per-element loss.
        label = torch.randint(low=0, high=2, size=(16, 4), device=get_current_device())
        return data, label