Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-06-26 15:32:22 +00:00
Fixed a typo
This commit is contained in:
parent bbfebfb9fc
commit b2eb9cd186
@@ -159,7 +159,7 @@ def llama_attn_forward(
     _, _, _, block_size = k_cache.shape

-    # NOTE: context_attention_unpadded is unsed for testing accuracy and we can only use aligned inputs.
+    # NOTE: context_attention_unpadded is used for testing accuracy and we can only use aligned inputs.
     # The code below will be uncommented after the development of attention-related kernel is completed.
     if is_prompts:
         attn_output = context_attention_unpadded(
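For context, the NOTE in this hunk concerns prefill-phase attention over variable-length prompts that are packed back to back with no padding. Below is a minimal plain-PyTorch sketch of what such an unpadded context-attention computation does; the function name, the packed (total_tokens, heads, head_dim) layout, and the cu_seqlens offsets are assumptions for illustration, not the actual interface of ColossalAI's context_attention_unpadded kernel.

    import torch

    def context_attention_sketch(q, k, v, cu_seqlens):
        # Hypothetical illustration, not the real ColossalAI kernel.
        # q, k, v: (total_tokens, heads, head_dim), all prompts in the batch
        # concatenated with no padding. cu_seqlens: (batch + 1,) cumulative
        # token offsets, e.g. [0, len0, len0 + len1, ...].
        out = torch.empty_like(q)
        scale = q.shape[-1] ** -0.5
        for i in range(cu_seqlens.numel() - 1):
            start, end = cu_seqlens[i].item(), cu_seqlens[i + 1].item()
            qi = q[start:end].transpose(0, 1)  # (heads, seq, head_dim)
            ki = k[start:end].transpose(0, 1)
            vi = v[start:end].transpose(0, 1)
            scores = (qi @ ki.transpose(-1, -2)) * scale  # (heads, seq, seq)
            # Causal mask: token t may only attend to tokens <= t.
            seq = end - start
            mask = torch.triu(
                torch.ones(seq, seq, dtype=torch.bool, device=q.device), diagonal=1
            )
            scores.masked_fill_(mask, float("-inf"))
            out[start:end] = (scores.softmax(dim=-1) @ vi).transpose(0, 1)
        return out

    # Usage: two prompts of lengths 3 and 4 packed into 7 tokens.
    total, heads, dim = 7, 2, 4
    q = torch.randn(total, heads, dim)
    k = torch.randn(total, heads, dim)
    v = torch.randn(total, heads, dim)
    cu_seqlens = torch.tensor([0, 3, 7])
    out = context_attention_sketch(q, k, v, cu_seqlens)  # (7, 2, 4)

Real unpadded-attention kernels fuse this per-sequence loop into a single GPU launch; the per-sequence Python loop here only makes the semantics explicit.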