Revert "Remove guard_size_oblivious from default contiguity python check, and add aten.sym_is_contiguous. (#159197)"

This reverts commit e444cd24d48b3a46f067974f2cc157f5ed27709f.

Reverted https://github.com/pytorch/pytorch/pull/159197 on behalf of https://github.com/laithsakka due to internal build failures ([comment](https://github.com/pytorch/pytorch/pull/159197#issuecomment-3195436668))
Author: PyTorch MergeBot
Date: 2025-08-18 07:22:13 +00:00
Parent: d8d589bd3a
Commit: b82aa3df20
20 changed files with 34 additions and 141 deletions

@@ -79,7 +79,6 @@ tensorOptionsT = BaseCppType("at", "TensorOptions")
 typeAndSizeT = BaseCppType("torch::autograd::generated", "TypeAndSize")
 tensorGeometryT = BaseCppType("at", "TensorGeometry")
 SymIntT = BaseCppType("c10", "SymInt")
-SymBoolT = BaseCppType("c10", "SymBool")
 symIntArrayRefT = BaseCppType("c10", "SymIntArrayRef")
 # Types representing template parameters. Technically, we probably shouldn't
@@ -126,7 +125,6 @@ BaseTypeToCppMapping: dict[BaseTy, BaseCppType] = {
     BaseTy.Storage: storageT,
     BaseTy.Stream: streamT,
     BaseTy.SymInt: SymIntT,
-    BaseTy.SymBool: SymBoolT,
 }
 # CTypes encode C++ type structure as needed for translation.
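For context, a minimal sketch (not part of this revert) of what the reverted `BaseTy.SymBool: SymBoolT` entry in `BaseTypeToCppMapping` provides during torchgen code generation. The import paths and the `ns`/`name` fields on `BaseCppType` are assumptions inferred from the hunks above; the point is only that, with the entry removed, a `BaseTy.SymBool` lookup falls through to a `KeyError`.

```python
# Illustrative sketch only. Assumes torchgen.model.BaseTy and the
# BaseTypeToCppMapping dict shown in the hunk above are importable as
# below, and that BaseCppType exposes `ns` and `name` fields (as
# suggested by constructions like BaseCppType("c10", "SymInt")).
from torchgen.model import BaseTy
from torchgen.api.types import BaseTypeToCppMapping


def cpp_type_name(base_ty: BaseTy) -> str:
    # Codegen consults the mapping to spell the C++ type for a schema-level base type.
    cpp = BaseTypeToCppMapping[base_ty]
    return f"{cpp.ns}::{cpp.name}" if cpp.ns else cpp.name


print(cpp_type_name(BaseTy.SymInt))   # "c10::SymInt"
# With the BaseTy.SymBool entry reverted, the same lookup raises KeyError:
# print(cpp_type_name(BaseTy.SymBool))
```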