Mirror of https://github.com/huggingface/transformers.git, synced 2025-10-20 17:13:56 +08:00
Adjust device logging level and add minor fixes (#41636)
This commit addresses a noisy warning and improves the robustness of the base pipeline implementation.

- The device placement message in the pipeline base class has been changed from a `warning` to a `debug` log. This reduces log noise for users who are aware of their device setup, while still providing the information for debugging purposes.
- Additionally, potential `UnboundLocalError` exceptions in the `_pad` and `check_model_type` functions have been prevented by initializing variables before their conditional assignment.
Committed by: GitHub
Parent: 2935a1be19
Commit: 35dc8f0a2e
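For context, here is a minimal sketch of the failure mode behind the `UnboundLocalError` fixes (the function and variable names are illustrative, not the pipeline code itself): a variable assigned only inside conditional branches is unbound when no branch matches, and initializing it up front turns the crash into an explicit error.

```python
# Sketch only: mirrors the bug class fixed in _pad, with hypothetical names.
def make_padding_buggy(dim):
    if dim == 2:
        tensor = "2-d padding tensor"
    elif dim == 3:
        tensor = "3-d padding tensor"
    return tensor  # UnboundLocalError when dim is, e.g., 5


def make_padding_fixed(dim):
    tensor = None  # initialized before the conditional assignment
    if dim == 2:
        tensor = "2-d padding tensor"
    elif dim == 3:
        tensor = "3-d padding tensor"
    if tensor is None:
        raise ValueError(f"Unsupported dimension {dim}")  # clear, actionable failure
    return tensor
```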
```diff
@@ -94,6 +94,7 @@ def _pad(items, key, padding_value, padding_side):
         min_length = min(item[key].shape[1] for item in items)
         dtype = items[0][key].dtype
 
+        tensor = None
         if dim == 2:
             if max_length == min_length:
                 # Bypass for `ImageGPT` which doesn't provide a padding value, yet
```
```diff
@@ -105,6 +106,9 @@ def _pad(items, key, padding_value, padding_side):
         elif dim == 4:
             tensor = torch.zeros((batch_size, max_length, shape[-2], shape[-1]), dtype=dtype) + padding_value
 
+        if tensor is None:
+            raise ValueError(f"Unable to create tensor for padding from {key} with dimension {dim}")
+
         for i, item in enumerate(items):
             if dim == 2:
                 if padding_side == "left":
```
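With `tensor` initialized to `None`, an unsupported `dim` now fails with the explicit `ValueError` above rather than an `UnboundLocalError` at the first use of `tensor`. A hypothetical repro sketch (assumes `torch` is installed; `_pad` is the module-level helper shown in this diff, and the 5-d shape is chosen only to miss every padding branch):

```python
import torch
from transformers.pipelines.base import _pad

# 5-dimensional tensors match none of the dim == 2/3/4 branches in _pad.
items = [{"input_ids": torch.zeros((1, 3, 2, 2, 2), dtype=torch.long)} for _ in range(2)]
try:
    _pad(items, "input_ids", padding_value=0, padding_side="right")
except ValueError as err:
    print(err)  # Unable to create tensor for padding from input_ids with dimension 5
```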
```diff
@@ -866,7 +870,7 @@ class Pipeline(_ScikitCompat, PushToHubMixin):
 
         if torch.distributed.is_available() and torch.distributed.is_initialized():
             self.device = self.model.device
-        logger.warning(f"Device set to use {self.device}")
+        logger.debug(f"Device set to use {self.device}")
 
         self.binary_output = binary_output
```
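Since the device message is now emitted at `debug` level, users who still want to see it can raise the library's verbosity. A small usage sketch (uses the public `transformers.utils.logging` helpers; the task is just an example):

```python
from transformers import pipeline
from transformers.utils import logging

logging.set_verbosity_debug()  # opt back in to debug-level messages
pipe = pipeline("sentiment-analysis")
# The "Device set to use ..." line now appears in the debug logs.
```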
```diff
@@ -1127,6 +1131,7 @@ class Pipeline(_ScikitCompat, PushToHubMixin):
             if self.task in SUPPORTED_PEFT_TASKS:
                 supported_models_names.extend(SUPPORTED_PEFT_TASKS[self.task])
 
+            model_name = None
             for model_name in supported_models.values():
                 # Mapping can now contain tuples of models for the same configuration.
                 if isinstance(model_name, tuple):
```
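The `check_model_type` change follows the same pattern: if `supported_models` is an empty mapping, the loop body never runs, and any later reference to `model_name` would raise `UnboundLocalError`. A minimal standalone sketch (hypothetical values, not the real model mapping):

```python
supported_models = {}  # e.g. no architectures registered for a task

model_name = None  # same guard as in the diff above
for model_name in supported_models.values():
    pass  # never executes for an empty mapping

if model_name is None:
    print("no supported models found")  # reached safely instead of crashing
```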