Add HPU to compatible shallow copy list and remove lazy HPU changes (#94673)

Fixes #ISSUE_NUMBER

Pull Request resolved: https://github.com/pytorch/pytorch/pull/94673
Approved by: https://github.com/wconstab

Author:    Sujoy Saraswati
Date:      2023-02-14 17:15:25 +00:00
Committer: PyTorch MergeBot
Parent:    5c64d2141f
Commit:    4a5ce921a0

2 changed files with 4 additions and 4 deletions

c10/core/TensorImpl.h

@@ -1896,7 +1896,8 @@ struct C10_API TensorImpl : public c10::intrusive_ptr_target {
            BackendComponent::CUDABit,
            BackendComponent::MPSBit,
            BackendComponent::HIPBit,
-           BackendComponent::XPUBit});
+           BackendComponent::XPUBit,
+           BackendComponent::HPUBit});
       constexpr auto dense_k = DispatchKeySet(DispatchKey::Dense);
       return ts.has_any(dense_k) && ts.has_any(dense_backends);
     };
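
For context, a minimal sketch of what widening dense_backends enables (assuming an HPU-enabled PyTorch build, e.g. with the habana_frameworks plugin registering the "hpu" device; tensor names are illustrative):

    import torch

    cpu_t = torch.ones(2, 2)                 # dense CPU tensor
    hpu_t = torch.ones(2, 2, device="hpu")   # dense HPU tensor

    # Assigning to .data goes through set_data(), which requires
    # has_compatible_shallow_copy_type() between the two TensorImpls.
    # With HPUBit included in dense_backends, a dense CPU impl and a dense
    # HPU impl both count as "dense", so the shallow copy is accepted; this
    # is the same path nn.Module._apply takes when moving a module to a device.
    cpu_t.data = hpu_t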

torch/_tensor.py

@@ -97,8 +97,7 @@ class Tensor(torch._C._TensorBase):
             # Update the test in test_serialization if you remove 'meta' from here
             if (
                 self.is_sparse
-                or self.device.type
-                in ["lazy", "xla", "mps", "ort", "meta", "hpu", "ipu"]
+                or self.device.type in ["lazy", "xla", "mps", "ort", "meta", "ipu"]
                 or (
                     not torch._C._has_storage(self)
                     and self.device.type == "privateuseone"
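
The hunk above sits in Tensor.__deepcopy__. A minimal sketch of what dropping "hpu" from that list changes (same HPU-build assumption as above):

    import copy
    import torch

    t = torch.randn(4, device="hpu")
    # "hpu" no longer takes the clone() fallback in __deepcopy__; the copy is
    # made through the regular storage-based branch used by other dense backends.
    t2 = copy.deepcopy(t)
    assert t2.device.type == "hpu"
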
@@ -256,7 +255,7 @@ class Tensor(torch._C._TensorBase):
         # 2. Python list is not a good fit due to performance reason.
         #    `tolist()` converts every single element in the tensor into python objects
         #    and serialize them one by one.
-        if self.device.type in ["xla", "ort", "hpu"] or (
+        if self.device.type in ["xla", "ort"] or (
             not torch._C._has_storage(self) and self.device.type == "privateuseone"
         ):
             # Convert BFloat16 tesors to Float32 before conversion to numpy, as numpy doesn't
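
The last hunk is in Tensor._reduce_ex_internal, which tensor pickling (and hence torch.save) goes through. A minimal sketch of the effect (same HPU-build assumption; the buffer name is illustrative):

    import io
    import torch

    t = torch.randn(8, dtype=torch.bfloat16, device="hpu")
    buf = io.BytesIO()
    # With "hpu" dropped from the list above, serialization no longer detours
    # through self.cpu().numpy(), so bfloat16 values avoid the Float32/numpy
    # round-trip described in the comment; HPU tensors follow the same
    # storage-based reduce path as other dense backends.
    torch.save(t, buf)
    buf.seek(0)
    reloaded = torch.load(buf)  # map_location can be passed to control placement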