Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-11 05:49:55 +00:00)
[misc] update pre-commit and run all files (#4752)
* [misc] update pre-commit
* [misc] run pre-commit
* [misc] remove useless configuration files
* [misc] ignore cuda for clang-format
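The commit message describes updating the pre-commit configuration, re-running the hooks across the whole repository (typically via `pre-commit run --all-files`), and excluding CUDA sources from clang-format. The following is a minimal, hypothetical `.pre-commit-config.yaml` sketch showing how such an exclusion can be expressed; the hook repositories, revisions, and exclude pattern are illustrative assumptions, not the actual ColossalAI configuration.

    # Hypothetical pre-commit config sketch; repos, revs, and patterns are
    # illustrative assumptions, not the project's real configuration.
    repos:
      - repo: https://github.com/pre-commit/mirrors-clang-format
        rev: v16.0.6                      # assumed pin; use whatever revision the project tracks
        hooks:
          - id: clang-format
            types_or: [c, c++]            # run only on C/C++ sources
            exclude: '.*\.(cu|cuh)$'      # skip CUDA kernels and headers
      - repo: https://github.com/psf/black
        rev: 23.9.1                       # assumed pin
        hooks:
          - id: black                     # format Python files

With a config like this in place, `pre-commit run --all-files` reformats every tracked file once, which is what produces a large mechanical diff such as the hunk shown below.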
@@ -5,7 +5,6 @@ from colossalai.shardformer.shard.utils import set_tensors_to_none
class Net(nn.Module):
    def __init__(self) -> None:
        super().__init__()
        self.layers = nn.Sequential(nn.Linear(1, 2), nn.Linear(2, 3))