mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-08-24 02:30:56 +00:00
* [legacy] move communication to legacy (#4640) * [legacy] refactor logger and clean up legacy codes (#4654) * [legacy] make logger independent to gpc * [legacy] make optim independent to registry * [legacy] move test engine to legacy * [legacy] move nn to legacy (#4656) * [legacy] move nn to legacy * [checkpointio] fix save hf config * [test] remove useless rpc pp test * [legacy] fix nn init * [example] skip tutorial hybrid parallel example * [devops] test doc check * [devops] test doc check
19 lines
393 B
Python
19 lines
393 B
Python
import torch
|
|
|
|
import colossalai.legacy.nn as col_nn
|
|
|
|
|
|
class MLP(torch.nn.Module):
    """A stack of ``layers`` square linear layers applied sequentially.

    Each layer is a ``col_nn.Linear(dim, dim)`` (ColossalAI legacy linear;
    presumably parallel-aware — confirm against colossalai.legacy.nn docs),
    so the feature dimension is preserved end to end. No activation is
    inserted between layers.

    Args:
        dim: Input and output feature dimension of every layer.
        layers: Number of linear layers in the stack.
    """

    def __init__(self, dim: int, layers: int):
        super().__init__()
        # ModuleList accepts an iterable of modules directly; this replaces
        # the manual empty-list-then-append loop with the idiomatic form.
        self.layers = torch.nn.ModuleList(
            col_nn.Linear(dim, dim) for _ in range(layers)
        )

    def forward(self, x):
        """Pass ``x`` through each layer in order and return the result."""
        for layer in self.layers:
            x = layer(x)
        return x
|