Files
pytorch/torch/csrc/cpu/Module.cpp
Nikita Shulga e56dcf2772 [CPUInductor] Fix SVE256 detection (#146207)
This PR removes `torch.cpu._is_arm_sve_supported()` and replaces it with the stable `torch.backends.cpu.get_cpu_capability()`

I should have reviewed https://github.com/pytorch/pytorch/pull/134672 more thoroughly, because it introduced a duplicate, but slightly different, API for detecting CPU architectures, which resulted in runtime crashes on systems that support SVE128 rather than SVE256

Fixes https://github.com/pytorch/pytorch/issues/145441

Pull Request resolved: https://github.com/pytorch/pytorch/pull/146207
Approved by: https://github.com/angelayi
2025-02-01 18:51:34 +00:00

23 lines
867 B
C++

#include <ATen/cpu/Utils.h>
#include <torch/csrc/cpu/Module.h>
#include <torch/csrc/utils/pybind.h>
namespace torch::cpu {

/// Registers the private `_cpu` submodule on the given Python module and
/// exposes CPU-capability query bindings (AVX2/AVX512/AMX feature checks,
/// AMX initialization, and L1d/L2 cache-size lookups) backed by at::cpu.
///
/// @param module  Borrowed reference to the parent Python module
///                (typically `torch._C`) that receives the `_cpu` submodule.
void initModule(PyObject* module) {
  auto torch_module = py::handle(module).cast<py::module>();
  auto cpu_module = torch_module.def_submodule("_cpu", "cpu related pybind.");
  // Chain all bindings; each wraps the corresponding at::cpu query directly.
  cpu_module.def("_is_avx2_supported", at::cpu::is_avx2_supported)
      .def("_is_avx512_supported", at::cpu::is_avx512_supported)
      .def("_is_avx512_vnni_supported", at::cpu::is_avx512_vnni_supported)
      .def("_is_avx512_bf16_supported", at::cpu::is_avx512_bf16_supported)
      .def("_is_amx_tile_supported", at::cpu::is_amx_tile_supported)
      .def("_is_amx_fp16_supported", at::cpu::is_amx_fp16_supported)
      .def("_init_amx", at::cpu::init_amx)
      .def("_L1d_cache_size", at::cpu::L1d_cache_size)
      .def("_L2_cache_size", at::cpu::L2_cache_size);
}
} // namespace torch::cpu