[fx] support module with bias addition (#1780)

* [autoparallel] refactor tracer to fix bias addition issue

* [fx] support module with bias addition

* create bias_addition_module

* refactor file structure

* polish code

* fix unit test
Author: YuliangLiu0306
Date: 2022-11-01 22:53:51 +08:00
Committed by: GitHub
Parent: f3f19a5c47
Commit: e859380bf7

41 changed files with 624 additions and 259 deletions
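At the heart of the change is bias-addition decomposition during tracing. A minimal sketch of the idea, under assumptions (DecomposedLinear is illustrative, not the PR's actual bias_addition_module API): a fused call like F.linear(input, weight, bias) is rewritten into a bias-free linear followed by an explicit add, so the bias addition appears as its own node in the torch.fx graph and can be handled separately by autoparallel passes.

import torch
import torch.nn.functional as F
from torch.fx import symbolic_trace


# Hypothetical sketch of the decomposition idea: rewrite F.linear(x, w, b)
# as F.linear(x, w) + b so the bias add becomes a separate node in the
# traced graph. DecomposedLinear is illustrative, not the PR's actual API.
class DecomposedLinear(torch.nn.Linear):

    def forward(self, x):
        out = F.linear(x, self.weight)  # matmul-only part, no bias
        if self.bias is not None:
            out = out + self.bias       # explicit, separately traceable bias add
        return out


gm = symbolic_trace(DecomposedLinear(4, 2))
print(gm.graph)  # shows a call_function linear node followed by an add node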


@@ -1,6 +1,6 @@
 import torch
-from ..registry import meta_patched_function
+from ...registry import meta_patched_function
 
 
 @meta_patched_function.register(torch.matmul)
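For context on the import being re-pointed above (the file moved one level deeper in the refactored structure): meta_patched_function is a registry that maps a torch callable to a shape-only replacement executed on meta tensors. A minimal sketch of how such a register decorator can work (illustrative; ColossalAI's actual Registry implementation may differ):

# Minimal registry sketch (illustrative, not ColossalAI's actual Registry):
# maps a torch callable to the function that replaces it during meta tracing.
class PatchRegistry:

    def __init__(self):
        self._patches = {}

    def register(self, source):
        # Used as @registry.register(torch.matmul); stores func under `source`.
        def wrapper(func):
            self._patches[source] = func
            return func
        return wrapper

    def get(self, source):
        return self._patches.get(source)


meta_patched_function = PatchRegistry()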
@@ -57,6 +57,16 @@ def torch_bmm(input, mat2, *, out=None):
     return torch.empty(batch_size, n, p, device="meta")
 
 
+@meta_patched_function.register(torch.nn.functional.linear)
+def torch_linear(input, mat2, *, out=None):
+    if out is not None:
+        raise ValueError("Don't support in-place linear for MetaTensor analysis")
+    output_shape = list(input.shape)
+    output_feature = list(mat2.shape)[0]
+    output_shape[-1] = output_feature
+    return torch.empty(*output_shape, device="meta")
+
+
 @meta_patched_function.register(torch.addbmm)
 @meta_patched_function.register(torch.Tensor.addbmm)
 def torch_addbmm(input, mat1, mat2, *, beta=1, alpha=1, out=None):
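
The newly registered torch_linear reproduces F.linear's shape rule without any real computation: every leading dimension of input is kept and the last one is replaced by the weight's first dimension (out_features). A standalone check of that rule (a sketch; the registry wiring is omitted):

import torch

# Standalone copy of the shape rule used by the patched torch_linear above:
# output shape = input.shape[:-1] + (weight.shape[0],), built on the meta device.
def torch_linear(input, mat2, *, out=None):
    if out is not None:
        raise ValueError("Don't support in-place linear for MetaTensor analysis")
    output_shape = list(input.shape)
    output_shape[-1] = list(mat2.shape)[0]
    return torch.empty(*output_shape, device="meta")

x = torch.empty(8, 32, device="meta")   # (batch, in_features)
w = torch.empty(64, 32, device="meta")  # (out_features, in_features)
print(torch_linear(x, w).shape)         # torch.Size([8, 64]); no real compute or memory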