Mirror of https://github.com/vllm-project/vllm.git, synced 2025-10-20 14:53:52 +08:00
[ROCM] fix native attention function call (#13650)
@@ -717,7 +717,6 @@ class ROCmFlashAttentionImpl(AttentionImpl):
                         self.num_heads,
                         self.head_size,
                         self.scale,
-                        causal_mask,
                         attn_masks,
                     )
             else:
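For context, a minimal sketch of the failure mode this commit addresses, assuming the native attention path dispatches to an SDPA-style helper that takes no causal-mask parameter. The names and signature below are illustrative stand-ins, not the exact vLLM code:

# Hypothetical stand-in for the native (SDPA) attention helper; the real
# vLLM function takes tensors, but the argument-count logic is the same.
def sdpa_attention(query, key, value, num_heads, head_size, scale,
                   attn_masks=None):
    return "ok"

q = k = v = None  # tensor placeholders
causal_mask = None
attn_masks = None

# Before the fix: causal_mask is an extra positional argument, so the
# call itself raises a TypeError before any attention math runs.
try:
    sdpa_attention(q, k, v, 8, 64, 0.125, causal_mask, attn_masks)
except TypeError as exc:
    print("broken call:", exc)

# After the fix: causal_mask is dropped from the call, matching the
# helper's signature.
print("fixed call:", sdpa_attention(q, k, v, 8, 64, 0.125, attn_masks))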