[aoti][mps] Add fused_rms and sdpa_mps fallback ops (#156844)

Adds `aten._fused_rms_norm.default` and `aten._scaled_dot_product_attention_math_for_mps.default` as AOTInductor fallback ops; needed to run llama3.1 on MPS.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/156844
Approved by: https://github.com/desertfire
ghstack dependencies: #156843
Author: angelayi
Date: 2025-06-25 20:06:49 -07:00
Committed by: PyTorch MergeBot
Parent: 17dab018e3
Commit: aff9c1eec5
2 changed files with 4 additions and 0 deletions


@@ -39,10 +39,12 @@ inductor_fallback_ops: dict[str, dict[str, list[str]]] = {
"aten._flash_attention_forward.default": {},
"aten._fused_moving_avg_obs_fq_helper_functional.default": {},
"aten._fused_moving_avg_obs_fq_helper.default": {},
"aten._fused_rms_norm.default": {},
"aten._histogramdd_from_bin_cts.default": {},
"aten._int_mm.out": {},
"aten._pdist_backward.default": {},
"aten._pdist_forward.default": {},
"aten._scaled_dot_product_attention_math_for_mps.default": {},
"aten._scaled_dot_product_cudnn_attention_backward.default": {},
"aten._scaled_dot_product_cudnn_attention.default": {},
"aten._scaled_dot_product_efficient_attention_backward.default": {},