Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-17 23:18:36 +00:00
[Feature] Zigzag Ring attention (#5905)
* halfway
* fix cross-PP-stage position id length diff bug
* fix typo
* fix typo
* [pre-commit.ci] auto fixes from pre-commit.com hooks
  for more information, see https://pre-commit.ci
* unified cross entropy func for all shardformer models
* remove redundant lines
* add basic ring attn; debug cross entropy
* fwd bwd logic complete
* fwd bwd logic complete; add experimental triton rescale
* precision tests passed
* precision tests passed
* fix typos and remove misc files
* [pre-commit.ci] auto fixes from pre-commit.com hooks
  for more information, see https://pre-commit.ci
* add sp_mode to benchmark; fix varlen interface
* update softmax_lse shape by new interface
* change tester name
* remove buffer clone; support packed seq layout
* add varlen tests
* fix typo
* all tests passed
* add dkv_group; fix mask
* remove debug statements

---------

Co-authored-by: Edenzzzz <wtan45@wisc.edu>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
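For readers unfamiliar with the technique in the title: zigzag ring attention shards the sequence so that each rank's causal-attention workload is balanced across the ring. A minimal sketch of the commonly used zigzag split rule, assuming a [B, S, ...] layout (`zigzag_split` is an illustrative name here, not ColossalAI's API):

import torch

def zigzag_split(seq: torch.Tensor, rank: int, world_size: int, dim: int = 1) -> torch.Tensor:
    # Cut the sequence into 2 * world_size chunks; rank i keeps chunk i and
    # chunk (2 * world_size - 1 - i), pairing a cheap early chunk with an
    # expensive late one so causal-attention work is balanced across ranks.
    chunks = seq.chunk(2 * world_size, dim=dim)
    return torch.cat([chunks[rank], chunks[2 * world_size - 1 - rank]], dim=dim)

# 8 token positions on 2 ranks: rank 0 holds [0, 1, 6, 7], rank 1 holds [2, 3, 4, 5].
pos = torch.arange(8)[None, :]
print(zigzag_split(pos, rank=0, world_size=2), zigzag_split(pos, rank=1, world_size=2))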
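The "experimental triton rescale" and "softmax_lse shape" items concern the step where ring attention merges partial outputs after each ring hop using their log-sum-exp statistics. A minimal pure-PyTorch sketch of that combine rule, assuming outputs shaped [B, H, Sq, D] and lse shaped [B, H, Sq, 1] (not the repo's Triton kernel):

import torch

def combine(out: torch.Tensor, lse: torch.Tensor,
            block_out: torch.Tensor, block_lse: torch.Tensor):
    # out/block_out are partial attention outputs, each normalized by its own
    # softmax denominator; lse/block_lse are the corresponding log-sum-exp of
    # the attention scores. Rescale both onto the merged normalizer and sum.
    new_lse = torch.logaddexp(lse, block_lse)
    out = torch.exp(lse - new_lse) * out + torch.exp(block_lse - new_lse) * block_out
    return out, new_lse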
@@ -88,6 +88,7 @@ def check_attn_func(dtype: torch.dtype, attn_func, attn_kwargs: dict, padding_mask
        padding_mask = padding_mask[:, None, :, None].logical_not()
        ref_output = ref_output.masked_fill(padding_mask, 0)
        output = output.masked_fill(padding_mask, 0)

    assert_close(output, ref_output, **tols)
    output.mean().backward()
    ref_output.mean().backward()
@@ -128,6 +129,8 @@ def test_flash_attn_func(dtype: torch.dtype):
        attn_kwargs, padding_mask = gen_kwargs_func(dtype)
        for attn_func, name, need_postprocess in attn_funcs:
            print(f"{dtype}, {name}, {mask_type}")
            if mask_type == "padded":
                pass
            if need_postprocess:
                check_attn_func(dtype, attn_func, post_process_kwargs_for_raw_attn(attn_kwargs), padding_mask)
            else:
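Context for the first hunk: FlashAttention-style kernels can leave arbitrary values at padded token slots, so the test zeroes those positions in both tensors before `assert_close`. A self-contained illustration of the same pattern (shapes and values invented here):

import torch
from torch.testing import assert_close

B, H, S, D = 2, 2, 4, 8
padding_mask = torch.tensor([[True, True, True, False],
                             [True, True, False, False]])  # True = real token
output = torch.randn(B, H, S, D)
ref_output = output.clone()
ref_output[0, :, 3] += 123.0  # disagreement confined to padded slots
ref_output[1, :, 2:] -= 7.0

pad = padding_mask[:, None, :, None].logical_not()  # [B, S] -> [B, 1, S, 1]
assert_close(output.masked_fill(pad, 0), ref_output.masked_fill(pad, 0))  # passes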