fix some typos: supported_head_sizes (#14627)

This commit is contained in:
Yang.Tao
2025-03-12 01:38:24 +08:00
committed by GitHub
parent 4cbf286794
commit 53056731fd

View File

@ -335,11 +335,11 @@ class BlocksparseFlashAttentionImpl(AttentionImpl):
self.sparse_block_size = self.blocksparse_params.block_size
self.head_sliding_step = self.blocksparse_params.head_sliding_step
suppored_head_sizes = PagedAttention.get_supported_head_sizes()
if head_size not in suppored_head_sizes:
supported_head_sizes = PagedAttention.get_supported_head_sizes()
if head_size not in supported_head_sizes:
raise ValueError(
f"Head size {head_size} is not supported by PagedAttention. "
f"Supported head sizes are: {suppored_head_sizes}.")
f"Supported head sizes are: {supported_head_sizes}.")
self.tp_size = get_tensor_model_parallel_world_size()
self.tp_rank = get_tensor_model_parallel_rank()