[autoparallel] fix bias addition module (#1800)

YuliangLiu0306
2022-11-08 16:21:25 +08:00
committed by GitHub
parent 6e9730d7ab
commit f6032ddb17
9 changed files with 438 additions and 20 deletions


@@ -58,7 +58,7 @@ def torch_bmm(input, mat2, *, out=None):
 @meta_patched_function.register(torch.nn.functional.linear)
-def torch_linear(input, mat2, *, out=None):
+def torch_linear(input, mat2, bias=None, *, out=None):
     if out is not None:
         raise ValueError("Don't support in-place abs for MetaTensor analysis")
     output_shape = list(input.shape)
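
For readers skimming the hunk: the fix widens the patched signature so that traced calls to torch.nn.functional.linear that pass a bias no longer fail during meta-tensor shape propagation. Below is a minimal sketch of how the complete patched function might look. Only the decorator, the signature, and the first three body lines are confirmed by the diff above; the shape computation and return are an assumed continuation, and meta_patched_function is the registry defined elsewhere in this file.

import torch

# meta_patched_function is assumed to be the module-level registry used
# throughout this file, as shown in the hunk above.
@meta_patched_function.register(torch.nn.functional.linear)
def torch_linear(input, mat2, bias=None, *, out=None):
    if out is not None:
        # Message kept verbatim from the diff; the mention of "abs" looks
        # like a copy-paste carryover from a neighboring patched function.
        raise ValueError("Don't support in-place abs for MetaTensor analysis")
    # F.linear computes input @ mat2.T (+ bias); mat2 has shape
    # (out_features, in_features), so only the last dimension changes.
    # bias affects values, not shape, which is all meta-tensor analysis
    # needs -- accepting the argument is enough to fix the traced call.
    output_shape = list(input.shape)
    output_shape[-1] = mat2.shape[0]  # assumed continuation, not in the hunk
    return torch.empty(output_shape, device="meta")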