mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-24 03:03:37 +00:00
[fx] support module with bias addition (#1780)
* [autoparallel] refactor tracer to fix bias-addition issue
* [fx] support module with bias addition
* create bias_addition_module
* refactor file structure
* polish code
* fix unit test
This commit is contained in:
@@ -1,6 +1,6 @@
|
||||
import torch
|
||||
|
||||
from ..registry import meta_patched_function
|
||||
from ...registry import meta_patched_function
|
||||
|
||||
|
||||
@meta_patched_function.register(torch.matmul)
|
||||
@@ -57,6 +57,16 @@ def torch_bmm(input, mat2, *, out=None):
|
||||
return torch.empty(batch_size, n, p, device="meta")
|
||||
|
||||
|
||||
@meta_patched_function.register(torch.nn.functional.linear)
def torch_linear(input, mat2, *, out=None):
    """Shape-only meta implementation of ``torch.nn.functional.linear``.

    Args:
        input: input tensor of shape ``(*, in_features)``.
        mat2: weight tensor; per ``F.linear`` convention its first dimension
            is ``out_features`` — assumed ``(out_features, in_features)``.
        out: in-place output tensor; not supported for meta analysis.

    Returns:
        An uninitialized tensor on the ``meta`` device whose shape matches
        ``input`` except that the last dimension is replaced by
        ``out_features``.

    Raises:
        ValueError: if ``out`` is supplied, since in-place results cannot be
            modeled during MetaTensor analysis.
    """
    if out is not None:
        # Bug fix: the message previously said "abs" — a copy-paste from
        # another meta patch; this function patches ``linear``.
        raise ValueError("Don't support in-place linear for MetaTensor analysis")
    output_shape = list(input.shape)
    # Weight rows give the output feature count; it replaces the last dim.
    output_feature = mat2.shape[0]
    output_shape[-1] = output_feature
    return torch.empty(*output_shape, device="meta")
|
||||
|
||||
|
||||
@meta_patched_function.register(torch.addbmm)
|
||||
@meta_patched_function.register(torch.Tensor.addbmm)
|
||||
def torch_addbmm(input, mat1, mat2, *, beta=1, alpha=1, out=None):
|
||||
|
Reference in New Issue
Block a user