From bda70b4b6676359a9d87eabb6524aca16dc9c15f Mon Sep 17 00:00:00 2001
From: puck_WCR <46049915+WANG-CR@users.noreply.github.com>
Date: Mon, 16 May 2022 14:43:32 +0800
Subject: [PATCH] [NFC] polish colossalai/kernel/cuda_native/layer_norm.py
 code style (#980)

---
 colossalai/kernel/cuda_native/layer_norm.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/colossalai/kernel/cuda_native/layer_norm.py b/colossalai/kernel/cuda_native/layer_norm.py
index af66eb827..38e95e2f8 100644
--- a/colossalai/kernel/cuda_native/layer_norm.py
+++ b/colossalai/kernel/cuda_native/layer_norm.py
@@ -24,8 +24,8 @@ class FusedLayerNormAffineFunction(torch.autograd.Function):
         input_ = input.contiguous()
         weight_ = weight.contiguous()
         bias_ = bias.contiguous()
-        output, mean, invvar = colossal_layer_norm_cuda.forward_affine(
-            input_, ctx.normalized_shape, weight_, bias_, ctx.eps)
+        output, mean, invvar = colossal_layer_norm_cuda.forward_affine(input_, ctx.normalized_shape, weight_, bias_,
+                                                                       ctx.eps)
         ctx.save_for_backward(input_, weight_, bias_, mean, invvar)

         return output
@@ -72,8 +72,7 @@ class MixedFusedLayerNorm(torch.nn.Module):

     def forward(self, input):

-        return FusedLayerNormAffineFunction.apply(input, self.weight, self.bias,
-                                                  self.normalized_shape, self.eps)
+        return FusedLayerNormAffineFunction.apply(input, self.weight, self.bias, self.normalized_shape, self.eps)

     def __repr__(self):
         return f'MixedFusedLayerNorm(normalized_shape={self.normalized_shape}, eps={self.eps})'
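
For context, the `forward` path touched by this patch is exercised whenever `MixedFusedLayerNorm` is called like a regular `torch.nn.LayerNorm`. Below is a minimal usage sketch, not part of the patch; it assumes a CUDA-enabled build of ColossalAI with the `colossal_layer_norm_cuda` extension compiled, and a constructor of the form `MixedFusedLayerNorm(normalized_shape, eps=1e-5)`, which is not shown in this diff.

```python
# Hypothetical usage sketch (not part of the patch): drives the forward()
# and backward() paths reformatted above. Assumes the colossal_layer_norm_cuda
# extension is built and a CUDA device is available.
import torch
from colossalai.kernel.cuda_native.layer_norm import MixedFusedLayerNorm

hidden_size = 1024
layer_norm = MixedFusedLayerNorm(hidden_size, eps=1e-5).cuda()

# (batch, seq_len, hidden) activations, normalized over the last dimension.
x = torch.randn(8, 128, hidden_size, device='cuda', requires_grad=True)
y = layer_norm(x)      # dispatches to FusedLayerNormAffineFunction.apply(...)
y.sum().backward()     # exercises the fused backward kernel as well
print(layer_norm)      # __repr__ reports normalized_shape and eps
```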