Optimize AMP custom_backend_name error message (#162037)

Print the AMP target dtype in the warning message so that custom backend developers can more easily determine the expected dtype during integration.
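
For context, the warning comes from the `get_amp_supported_dtype()` hook that `autocast` queries on the registered custom backend module (see the diff below). Here is a minimal sketch of that wiring, using a stand-in module rather than the real torch_openreg package; a full backend also needs the underlying device support, which this omits:

```python
# Minimal sketch (assumed wiring, not the actual torch_openreg package)
# of the hook that autocast consults for custom backends.
import types

import torch

openreg = types.ModuleType("openreg")  # stand-in for a real backend module

def get_amp_supported_dtype():
    # autocast checks `self.fast_dtype not in supported_dtype`; returning
    # only float32 makes a float16 request hit the warning path below.
    return [torch.float32]

openreg.get_amp_supported_dtype = get_amp_supported_dtype

# Expose the backend as "openreg" (PrivateUse1) so that
# torch.autocast("openreg", ...) can locate the module on torch.
torch.utils.rename_privateuse1_backend("openreg")
torch._register_device_module("openreg", openreg)
```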

## Test Result

### Before
```python
In [1]: import torch
   ...: import torch_openreg
   ...:
   ...: a = torch.randn(3, 4)
   ...: b = torch.randn(4, 2)
   ...: with torch.autocast("openreg", dtype=torch.float16):
   ...:     torch.mm(a, b)
   ...:
/home/coder/code/pytorch/torch/amp/autocast_mode.py:332: UserWarning: In openreg autocast, but the target dtype is not supported. Disabling autocast.
 openreg Autocast only supports dtypes of torch.float32 currently.
  warnings.warn(error_message)
```

### After
```python
In [1]: import torch
   ...: import torch_openreg
   ...:
   ...: a = torch.randn(3, 4)
   ...: b = torch.randn(4, 2)
   ...: with torch.autocast("openreg", dtype=torch.float16):
   ...:     torch.mm(a, b)
   ...:

/home/coder/code/pytorch/torch/amp/autocast_mode.py:332: UserWarning: In openreg autocast, but the target dtype torch.float16 is not supported. Disabling autocast.
 openreg Autocast only supports dtypes of torch.float32 currently.
  warnings.warn(error_message)
```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/162037
Approved by: https://github.com/zou3519
Author: zeshengzong
Date: 2025-09-04 08:27:56 +00:00
Committed by: PyTorch MergeBot
Parent: e19e02c84c
Commit: 8fd3c9ce91

torch/amp/autocast_mode.py:

```diff
@@ -324,7 +324,7 @@ class autocast:
         elif self.device == self.custom_backend_name:
             supported_dtype = self.custom_device_mod.get_amp_supported_dtype()
             if self.fast_dtype not in supported_dtype:
-                error_message = f"In {self.custom_backend_name} autocast, but the target dtype is not supported. "
+                error_message = f"In {self.custom_backend_name} autocast, but the target dtype {self.fast_dtype} is not supported. "
                 error_message += f"Disabling autocast.\n {self.custom_backend_name} Autocast only supports dtypes of "
                 error_message += (
                     ", ".join(str(dtype) for dtype in supported_dtype) + " currently."
```