Mirror of https://github.com/pytorch/pytorch.git
[1/N] replace THPUtils_assert with TORCH_CHECK (#116675)
This PR replaces THPUtils_assert with TORCH_CHECK.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/116675
Approved by: https://github.com/albanD
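For readers unfamiliar with the two macros, the sketch below (simplified stand-ins, not the real PyTorch definitions) contrasts the two error-reporting styles: THPUtils_assertRet reports failure by recording a Python error and returning an explicit sentinel, while TORCH_CHECK throws a c10::Error that an enclosing handler (END_HANDLE_TH_ERRORS_RET(-1) in the hunks below) converts back into the Python exception and the -1 return.

// Minimal sketch, NOT the real PyTorch macros: illustrates why the explicit
// "-1" argument disappears when THPUtils_assertRet becomes TORCH_CHECK.
#include <cstdio>
#include <stdexcept>

// THPUtils_assertRet style: on failure, record the error (PyErr_SetString in
// the real code; stderr here) and return the caller-supplied sentinel.
#define ASSERT_RET(ret, cond, msg)         \
  do {                                     \
    if (!(cond)) {                         \
      std::fprintf(stderr, "%s\n", (msg)); \
      return (ret);                        \
    }                                      \
  } while (0)

// TORCH_CHECK style: on failure, throw; no sentinel needed at the call site.
#define CHECK(cond, msg)             \
  do {                               \
    if (!(cond)) {                   \
      throw std::runtime_error(msg); \
    }                                \
  } while (0)

// A setter in the shape of the ones in this diff. The try/catch stands in
// for HANDLE_TH_ERRORS / END_HANDLE_TH_ERRORS_RET(-1), which catches the
// thrown error and produces the Python exception plus the -1 return.
int set_attr(bool obj_is_valid) {
  try {
    CHECK(obj_is_valid, "Deletion of this attribute is not allowed!");
    return 0;
  } catch (const std::exception& e) {
    std::fprintf(stderr, "%s\n", e.what());
    return -1;
  }
}

int main() {
  std::printf("%d\n", set_attr(true));  // prints 0
  std::printf("%d\n", set_attr(false)); // prints the message, then -1
}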
@@ -897,9 +897,8 @@ static int THPVariable_set_grad_fn(
   if (check_has_torch_function((PyObject*)self)) {
     return handle_torch_function_setter(self, "_grad_fn", obj);
   }
-  THPUtils_assertRet(
-      -1, obj, "Deletion of _grad_fn not allowed. Detach tensor instead!");
-  THPUtils_assertRet(-1, obj == Py_None, "_grad_fn can be only set to None");
+  TORCH_CHECK(obj, "Deletion of _grad_fn not allowed. Detach tensor instead!");
+  TORCH_CHECK(obj == Py_None, "_grad_fn can be only set to None");
   THPVariable_Unpack(self).detach_();
   return 0;
   END_HANDLE_TH_ERRORS_RET(-1)
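Note the END_HANDLE_TH_ERRORS_RET(-1) already present at the end of this hunk: it is what catches the c10::Error thrown by TORCH_CHECK and turns it into a Python exception plus the -1 return, so the sentinel that THPUtils_assertRet took as its first argument no longer needs to be spelled out at each call site.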
@@ -919,8 +918,8 @@ int THPVariable_set_data(THPVariable* self, PyObject* data, void* unused) {
   if (check_has_torch_function((PyObject*)self)) {
     return handle_torch_function_setter(self, "data", data);
   }
-  THPUtils_assertRet(
-      -1, data, "Deleting tensor data is not allowed. Delete tensor instead!");
+  TORCH_CHECK(
+      data, "Deleting tensor data is not allowed. Delete tensor instead!");
   if (!THPVariable_Check(data)) {
     throw torch::TypeError(
         "Variable data has to be a tensor, but got %s", Py_TYPE(data)->tp_name);
@@ -946,10 +945,8 @@ int THPVariable_set_grad(THPVariable* self, PyObject* py_grad, void* unused) {
       THPVariable_Check(py_grad),
       "assigned grad expected to be a Tensor or None but got grad of type ",
       THPUtils_typename(py_grad));
-  THPUtils_assertRet(
-      -1,
-      self != (THPVariable*)py_grad,
-      "can't assign Variable as its own grad");
+  TORCH_CHECK(
+      self != (THPVariable*)py_grad, "can't assign Variable as its own grad");

   const auto& grad = THPVariable_Unpack(py_grad);
   TORCH_CHECK(
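As the pre-existing TORCH_CHECK at the top of this hunk illustrates, the macro accepts a variadic message and concatenates its trailing arguments stream-style, so a dynamic piece such as the offending type name is passed as a separate argument rather than via printf-style formatting:

  TORCH_CHECK(
      THPVariable_Check(py_grad),
      "assigned grad expected to be a Tensor or None but got grad of type ",
      THPUtils_typename(py_grad));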
@@ -1111,8 +1108,7 @@ int THPVariable_set_names(PyObject* self, PyObject* names, void* unused) {
   if (names == Py_None) {
     at::internal_set_names_inplace(var, at::nullopt);
   } else {
-    THPUtils_assertRet(
-        -1,
+    TORCH_CHECK(
         THPUtils_checkDimnameList(names),
         "names must either be None or a tuple of dim names");
     at::internal_set_names_inplace(var, torch::parseDimnameList(names));
@@ -1129,8 +1125,7 @@ int THPVariable_set_requires_grad(
   if (check_has_torch_function((PyObject*)self)) {
     return handle_torch_function_setter(self, "requires_grad", obj);
   }
-  THPUtils_assertRet(
-      -1, obj && PyBool_Check(obj), "requires_grad must be a bool");
+  TORCH_CHECK(obj && PyBool_Check(obj), "requires_grad must be a bool");
   const auto& var = THPVariable_Unpack(self);
   auto requires_grad = (obj == Py_True);
   if (!var.is_leaf()) {
@@ -1182,7 +1177,7 @@ int THPVariable_set_backwards_hooks(
   if (check_has_torch_function((PyObject*)self)) {
     return handle_torch_function_setter(self, "_backward_hooks", obj);
   }
-  THPUtils_assertRet(-1, obj, "Deletion of _backwards_hooks not allowed!");
+  TORCH_CHECK(obj, "Deletion of _backwards_hooks not allowed!");
   if (obj == Py_None) {
     obj = nullptr;
   }
@@ -1223,8 +1218,7 @@ int THPVariable_set_post_accumulate_grad_hooks(
     return handle_torch_function_setter(
         self, "_post_accumulate_grad_hooks", obj);
   }
-  THPUtils_assertRet(
-      -1, obj, "Deletion of _post_accumulate_grad_hooks not allowed!");
+  TORCH_CHECK(obj, "Deletion of _post_accumulate_grad_hooks not allowed!");
   if (obj == Py_None) {
     obj = nullptr;
   }