[fix] unused option

Runyu Lu 2024-03-21 11:28:42 +08:00
parent aabc9fb6aa
commit 4eafe0c814

@@ -199,8 +199,7 @@ def llama_rmsnorm_forward(
     residual: torch.Tensor = None,
     use_cuda_kernel: bool = True,
 ):
-    # if use_cuda_kernel:
-    if False:
+    if use_cuda_kernel:
         if residual is not None:
             inference_ops.fused_add_rms_layernorm(hidden_states, residual, self.weight.data, self.variance_epsilon)
             return hidden_states, residual
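
The first hunk restores the CUDA fast path in the RMSNorm forward. A debugging leftover had replaced the check on `use_cuda_kernel` with a hard-coded `if False:`, so the flag was accepted but ignored and the fused kernel could never run. Below is a minimal sketch of the restored dispatch, assuming `inference_ops` exposes the `fused_add_rms_layernorm` kernel named in the hunk; the eager fallback is an illustrative stand-in, not the file's actual non-CUDA path:

import torch

def rmsnorm_forward(hidden_states, weight, eps, residual=None,
                    use_cuda_kernel=True, inference_ops=None):
    # Fast path: fused residual-add + RMSNorm in one kernel, in place
    # (this is the call the hunk re-enables).
    if use_cuda_kernel and inference_ops is not None and residual is not None:
        inference_ops.fused_add_rms_layernorm(hidden_states, residual, weight, eps)
        return hidden_states, residual
    # Eager fallback (illustrative): add the residual, then RMS-normalize.
    if residual is not None:
        hidden_states = hidden_states + residual
        residual = hidden_states
    variance = hidden_states.pow(2).mean(-1, keepdim=True)
    hidden_states = hidden_states * torch.rsqrt(variance + eps)
    return weight * hidden_states, residual

Gating on the flag rather than deleting the fallback keeps the module usable on setups where the extension kernels are not built.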
@@ -340,8 +339,7 @@ class NopadLlamaAttention(LlamaAttention):
                 sm_scale=sm_scale,
             )
         else:
-            # if use_cuda_kernel:
-            if False:
+            if use_cuda_kernel:
                 inference_ops.rotary_embedding_and_cache_copy(
                     query_states,
                     key_states,
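
The second hunk applies the same fix in NopadLlamaAttention: the hard-coded `if False:` had been forcing execution past `inference_ops.rotary_embedding_and_cache_copy`, the kernel that fuses rotary embedding with the KV-cache copy. The hunk truncates that kernel's argument list, so the sketch below keeps a hypothetical `fused_kernel` stand-in for it and only shows the dispatch shape together with a standard eager RoPE:

import torch

def rotate_half(x):
    # Standard LLaMA-style rotate-half used by rotary embeddings.
    x1, x2 = x.chunk(2, dim=-1)
    return torch.cat((-x2, x1), dim=-1)

def apply_rotary(query_states, key_states, cos, sin,
                 use_cuda_kernel=True, fused_kernel=None):
    if use_cuda_kernel and fused_kernel is not None:
        # Fused path: one kernel applies RoPE and copies K/V into the cache.
        # `fused_kernel` is a hypothetical stand-in, not the real signature.
        return fused_kernel(query_states, key_states, cos, sin)
    # Eager fallback: apply RoPE with plain tensor ops.
    q = query_states * cos + rotate_half(query_states) * sin
    k = key_states * cos + rotate_half(key_states) * sin
    return q, k

In both hunks the "unused option" of the commit title is `use_cuda_kernel`: it was a live parameter in the signature but dead in the body until these guards were restored.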