Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-05 02:51:59 +00:00
[shardformer] support bias_gelu_jit_fused for models (#5647)
* support gelu_bias_fused for gpt2
* follow-up fixes
@@ -1287,3 +1287,16 @@ def bert_sequence_parallel_forward_fn(shard_config: ShardConfig):
         )
 
     return forward
+
+
+def get_jit_fused_bert_intermediate_forward():
+    from transformers.models.bert.modeling_bert import BertIntermediate
+
+    from colossalai.kernel.jit.bias_gelu import GeLUFunction as JitGeLUFunction
+
+    def forward(self: BertIntermediate, hidden_states: torch.Tensor) -> torch.Tensor:
+        hidden_states, bias = self.dense(hidden_states)
+        hidden_states = JitGeLUFunction.apply(hidden_states, bias)
+        return hidden_states
+
+    return hidden_states if False else forward
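For context, here is a minimal, self-contained sketch of the fused bias + GeLU op the new forward calls into. FusedBiasGeLU, bias_gelu, and bias_gelu_back are illustrative stand-ins for colossalai.kernel.jit.bias_gelu.GeLUFunction and its helpers, not the library source; the constants are the standard tanh approximation of GeLU. The diff also assumes self.dense has been swapped for a skip_bias_add linear that returns (output, bias), so the bias add can be deferred into the scripted kernel.

import torch

@torch.jit.script
def bias_gelu(bias: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    # Bias add and tanh-approximated GeLU compiled into one TorchScript
    # kernel, saving a separate elementwise pass over the activations.
    x = bias + y
    return x * 0.5 * (1.0 + torch.tanh(0.79788456 * x * (1.0 + 0.044715 * x * x)))

@torch.jit.script
def bias_gelu_back(g: torch.Tensor, bias: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    # Analytic derivative of the approximation above, fused likewise.
    x = bias + y
    tanh_out = torch.tanh(0.79788456 * x * (1.0 + 0.044715 * x * x))
    ff = 0.5 * x * ((1.0 - tanh_out * tanh_out) * (0.79788456 + 0.1070322243 * x * x)) + 0.5 * (1.0 + tanh_out)
    return ff * g

class FusedBiasGeLU(torch.autograd.Function):
    @staticmethod
    def forward(ctx, input: torch.Tensor, bias: torch.Tensor) -> torch.Tensor:
        ctx.save_for_backward(input, bias)
        return bias_gelu(bias, input)

    @staticmethod
    def backward(ctx, grad_output: torch.Tensor):
        input, bias = ctx.saved_tensors
        tmp = bias_gelu_back(grad_output, bias, input)
        # bias is broadcast in forward, so its gradient is summed back
        # over the leading (broadcast) dimensions.
        return tmp, tmp.sum(dim=tuple(range(tmp.dim() - 1)))

if __name__ == "__main__":
    x = torch.randn(4, 8, requires_grad=True)
    b = torch.randn(8, requires_grad=True)
    FusedBiasGeLU.apply(x, b).sum().backward()

In a shardformer-style policy, the returned forward would then be bound onto the live module, e.g. module.forward = types.MethodType(get_jit_fused_bert_intermediate_forward(), module).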