[shardformer] opt fix. (#4514)

* [shardformer] chatglm support sequence parallel

* fix

* [shardformer] jit fused fix

* activate checks

* [Test] test ci

* fix
Author: flybird11111
Date: 2023-08-25 19:41:24 +08:00 (committed by GitHub)
Parent: 3353e55c80
Commit: de8a65babc
3 changed files with 14 additions and 15 deletions


@@ -103,21 +103,21 @@ class OPTPolicy(Policy):
                                                          target_key=OPTDecoderLayer)
         # use flash attention
-        if self.shard_config.enable_flash_attention:
-            self.append_or_create_method_replacement(description={
-                'forward': get_opt_flash_attention_forward(),
-            },
-                                                      policy=policy,
-                                                      target_key=OPTAttention)
+        # if self.shard_config.enable_flash_attention:
+        #     self.append_or_create_method_replacement(description={
+        #         'forward': get_opt_flash_attention_forward(),
+        #     },
+        #                                              policy=policy,
+        #                                              target_key=OPTAttention)
         # use jit fused operator
-        if self.shard_config.enable_jit_fused:
-            self.append_or_create_method_replacement(description={
-                'forward': get_jit_fused_opt_decoder_layer_forward(),
-                'dropout_add': get_jit_fused_dropout_add_func(),
-            },
-                                                      policy=policy,
-                                                      target_key=OPTDecoderLayer)
+        # if self.shard_config.enable_jit_fused:
+        #     self.append_or_create_method_replacement(description={
+        #         'forward': get_jit_fused_opt_decoder_layer_forward(),
+        #         'dropout_add': get_jit_fused_dropout_add_func(),
+        #     },
+        #                                              policy=policy,
+        #                                              target_key=OPTDecoderLayer)
         return policy
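
For reference, the snippet below is a minimal, self-contained sketch of the pattern this policy uses: replacement methods are registered for a target module only when the corresponding shard_config feature flag is enabled. The names MyPolicy, MyAttention, MyDecoderLayer, custom_flash_forward, fused_decoder_forward and fused_dropout_add are hypothetical stand-ins, not ColossalAI APIs; the real policy targets module classes (OPTAttention, OPTDecoderLayer) rather than strings.

# Hypothetical sketch of a shardformer-style policy: method replacements are
# gated on shard_config flags, mirroring the enable_flash_attention and
# enable_jit_fused checks in the diff above. All names here are stand-ins.
from types import SimpleNamespace


def custom_flash_forward(self, hidden_states, *args, **kwargs):
    # Placeholder: a real replacement would call a flash-attention kernel.
    raise NotImplementedError


def fused_decoder_forward(self, hidden_states, *args, **kwargs):
    # Placeholder: a real replacement would use JIT-fused dropout + residual add.
    raise NotImplementedError


def fused_dropout_add(x, residual, prob, training):
    # Placeholder for a JIT-fused dropout-and-add helper.
    raise NotImplementedError


class MyPolicy:
    def __init__(self, shard_config):
        self.shard_config = shard_config

    @staticmethod
    def append_or_create_method_replacement(description, policy, target_key):
        # Merge the method replacements for `target_key` into the policy dict.
        policy.setdefault(target_key, {}).update(description)

    def module_policy(self):
        policy = {}
        # Register each replacement only when its feature flag is set.
        if getattr(self.shard_config, "enable_flash_attention", False):
            self.append_or_create_method_replacement(
                description={"forward": custom_flash_forward},
                policy=policy,
                target_key="MyAttention",
            )
        if getattr(self.shard_config, "enable_jit_fused", False):
            self.append_or_create_method_replacement(
                description={
                    "forward": fused_decoder_forward,
                    "dropout_add": fused_dropout_add,
                },
                policy=policy,
                target_key="MyDecoderLayer",
            )
        return policy


if __name__ == "__main__":
    cfg = SimpleNamespace(enable_flash_attention=True, enable_jit_fused=False)
    # With only flash attention enabled, the policy maps just "MyAttention".
    print(MyPolicy(cfg).module_policy())

Disabling a flag simply leaves the target module's stock forward in place, which is why commenting out the gated block (as in the diff) is a low-risk way to fall back to the unfused code path.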