mirror of
https://github.com/pytorch/pytorch.git
synced 2025-10-20 21:14:14 +08:00
[2/N] Apply bugprone-unchecked-optional-access (#141091)
Fixes #ISSUE_NUMBER. Pull Request resolved: https://github.com/pytorch/pytorch/pull/141091. Approved by: https://github.com/Skylion007, https://github.com/albanD. Co-authored-by: Aaron Gokaslan <aaronGokaslan@gmail.com>
This commit is contained in:
@ -18,9 +18,9 @@ inline std::optional<int64_t> _check_param_device(
|
||||
} else {
|
||||
bool warn = false;
|
||||
if (param.is_cuda()) { // Check if in same GPU
|
||||
warn = (param.get_device() != old_param_device.value());
|
||||
warn = (param.get_device() != old_param_device);
|
||||
} else { // Check if in CPU
|
||||
warn = (old_param_device.value() != -1);
|
||||
warn = (old_param_device != -1);
|
||||
}
|
||||
if (warn) {
|
||||
TORCH_CHECK(
|
||||
|
@ -16,14 +16,14 @@ EmbeddingImpl::EmbeddingImpl(EmbeddingOptions options_)
|
||||
}
|
||||
|
||||
void EmbeddingImpl::reset() {
|
||||
if (options.padding_idx() != std::nullopt) {
|
||||
if (*options.padding_idx() > 0) {
|
||||
if (options.padding_idx().has_value()) {
|
||||
if (options.padding_idx() > 0) {
|
||||
TORCH_CHECK(
|
||||
*options.padding_idx() < options.num_embeddings(),
|
||||
options.padding_idx() < options.num_embeddings(),
|
||||
"Padding_idx must be within num_embeddings");
|
||||
} else if (*options.padding_idx() < 0) {
|
||||
} else if (options.padding_idx() < 0) {
|
||||
TORCH_CHECK(
|
||||
*options.padding_idx() >= -options.num_embeddings(),
|
||||
options.padding_idx() >= -options.num_embeddings(),
|
||||
"Padding_idx must be within num_embedding");
|
||||
options.padding_idx(options.num_embeddings() + *options.padding_idx());
|
||||
}
|
||||
|
Reference in New Issue
Block a user