Back out "Revert D81959389" (#163905)
Summary:
Original commit changeset: 06888d7ebff0
Original Phabricator Diff: D82932788

Restricted the test to SM90 for scaled_grouped_mm.

Test Plan: TBD (will share the linux CI results)

Differential Revision: D83283991

Pull Request resolved: https://github.com/pytorch/pytorch/pull/163905
Approved by: https://github.com/angelayi
committed by PyTorch MergeBot
parent bbf6816f35
commit 7afcb030d8
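The summary above notes that the test was restricted to SM90 for scaled_grouped_mm. Below is a minimal sketch of how such a capability gate is commonly expressed in a PyTorch unittest; the helper and test names are illustrative and are not the exact test touched by this PR.

import unittest

import torch


def is_sm90() -> bool:
    # get_device_capability() returns (major, minor); Hopper (SM90) is (9, 0).
    return torch.cuda.is_available() and torch.cuda.get_device_capability() == (9, 0)


class TestScaledGroupedMM(unittest.TestCase):
    @unittest.skipIf(not is_sm90(), "scaled_grouped_mm requires SM90 (Hopper)")
    def test_runs_only_on_sm90(self):
        # The real test would exercise aten._scaled_grouped_mm here; this
        # placeholder only demonstrates the SM90 gate.
        self.assertTrue(is_sm90())


if __name__ == "__main__":
    unittest.main()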
@@ -56,6 +56,7 @@ inductor_fallback_ops: dict[str, dict[str, list[str]]] = {
     "aten._scaled_dot_product_fused_attention_overrideable_backward.default": {},
     "aten._scaled_dot_product_fused_attention_overrideable.default": {},
     "aten._scaled_mm.default": {},
+    "aten._scaled_grouped_mm.default": {},
     "aten._scaled_mm.out": {},
     "aten._segment_reduce_backward.default": {},
     "aten._thnn_fused_lstm_cell.default": {},
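For context, inductor_fallback_ops lists ATen op overloads that Inductor/AOTInductor handles by falling back to the existing ATen kernel rather than generating code for them, and an empty per-op dict means default fallback behavior. A quick sanity check of the restored entry might look like the sketch below; the import path is an assumption about where the table lives in the tree, not something confirmed by this page.

# Sketch only: the module path below is assumed, not confirmed by this diff.
from torchgen.aoti.fallback_ops import inductor_fallback_ops

assert "aten._scaled_grouped_mm.default" in inductor_fallback_ops
# An empty per-op dict means the op falls back with default settings.
print(inductor_fallback_ops["aten._scaled_grouped_mm.default"])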