ColossalAI/colossalai/fx/tracer/meta_patch/patched_module/activation_function.py

import torch

from ...registry import meta_patched_module


# All of these activations are element-wise, so a single patched forward that
# preserves the input's shape covers every registered module.
@meta_patched_module.register(torch.nn.ReLU)
@meta_patched_module.register(torch.nn.Sigmoid)
@meta_patched_module.register(torch.nn.GELU)
@meta_patched_module.register(torch.nn.Tanh)
@meta_patched_module.register(torch.nn.ReLU6)
@meta_patched_module.register(torch.nn.PReLU)
def torch_nn_non_linear_act(self, input):
    # Return an uninitialized tensor on the meta device with the same shape as
    # the input; only shape/device metadata is propagated, no real computation.
    return torch.empty(input.shape, device="meta")
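
For context, a minimal usage sketch of the patched forward defined above (hypothetical snippet, not part of the original file; it reuses the torch import and the function from the listing): calling it with a meta tensor produces an output on the meta device with the input's shape, which is all that symbolic tracing needs.

# Illustrative only: exercise the patched forward directly.
relu = torch.nn.ReLU()
meta_input = torch.empty(4, 8, device="meta")            # no real storage is allocated
meta_output = torch_nn_non_linear_act(relu, meta_input)

assert meta_output.is_meta                 # output lives on the meta device
assert meta_output.shape == (4, 8)         # shape matches the input

Registering one function for several nn.Module classes works here because every listed activation is shape-preserving; a module whose output shape differs from its input would need its own patch.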