[2/N] Apply bugprone-unchecked-optional-access (#141091)

Fixes #ISSUE_NUMBER

Pull Request resolved: https://github.com/pytorch/pytorch/pull/141091
Approved by: https://github.com/Skylion007, https://github.com/albanD

Co-authored-by: Aaron Gokaslan <aaronGokaslan@gmail.com>
This commit is contained in:
cyy
2024-12-09 19:30:15 +00:00
committed by PyTorch MergeBot
parent 005c5694eb
commit b4c0973b59
24 changed files with 71 additions and 72 deletions

View File

@@ -18,9 +18,9 @@ inline std::optional<int64_t> _check_param_device(
   } else {
     bool warn = false;
     if (param.is_cuda()) { // Check if in same GPU
-      warn = (param.get_device() != old_param_device.value());
+      warn = (param.get_device() != old_param_device);
     } else { // Check if in CPU
-      warn = (old_param_device.value() != -1);
+      warn = (old_param_device != -1);
     }
     if (warn) {
       TORCH_CHECK(

View File

@@ -16,14 +16,14 @@ EmbeddingImpl::EmbeddingImpl(EmbeddingOptions options_)
 }
 void EmbeddingImpl::reset() {
-  if (options.padding_idx() != std::nullopt) {
-    if (*options.padding_idx() > 0) {
+  if (options.padding_idx().has_value()) {
+    if (options.padding_idx() > 0) {
       TORCH_CHECK(
-          *options.padding_idx() < options.num_embeddings(),
+          options.padding_idx() < options.num_embeddings(),
           "Padding_idx must be within num_embeddings");
-    } else if (*options.padding_idx() < 0) {
+    } else if (options.padding_idx() < 0) {
       TORCH_CHECK(
-          *options.padding_idx() >= -options.num_embeddings(),
+          options.padding_idx() >= -options.num_embeddings(),
           "Padding_idx must be within num_embedding");
       options.padding_idx(options.num_embeddings() + *options.padding_idx());
     }