Mirror of https://github.com/vllm-project/vllm.git (synced 2025-10-20 14:53:52 +08:00)
[Misc]Fix BitAndBytes exception messages (#7626)
@@ -883,11 +883,11 @@ class BitsAndBytesModelLoader(BaseModelLoader):
         if not hasattr(model, 'load_weights'):
             raise AttributeError(
                 "The required method 'load_weights' is not defined in class"
-                f" {type(self).__name__}.")
+                f" {type(model).__name__}.")
 
         if not hasattr(model, 'bitsandbytes_stacked_params_mapping'):
             raise AttributeError(
-                f"Model {type(self).__name__} does not support BitsAndBytes "
+                f"Model {type(model).__name__} does not support BitsAndBytes "
                 "quantization yet.")
 
         logger.info("Loading weights with BitsAndBytes quantization. "
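Why the change matters: inside BitsAndBytesModelLoader, `self` is the loader object, so the old messages always reported the loader's class name rather than the class of the model that failed the check. Below is a minimal standalone sketch (the `DummyModel` and `Loader` names are hypothetical, not part of vLLM) showing the corrected behavior, where the error names the actual model class:

    class DummyModel:
        """Stand-in for a model class that lacks BitsAndBytes support."""


    class Loader:
        def check(self, model):
            # The old message used type(self).__name__ and would have said
            # "Model Loader ..."; the fix reports the model's own class.
            if not hasattr(model, 'bitsandbytes_stacked_params_mapping'):
                raise AttributeError(
                    f"Model {type(model).__name__} does not support "
                    "BitsAndBytes quantization yet.")


    try:
        Loader().check(DummyModel())
    except AttributeError as err:
        print(err)  # Model DummyModel does not support BitsAndBytes quantization yet.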