From de09bab4b66002a8a9a2195f50f96a78868a3d39 Mon Sep 17 00:00:00 2001
From: Aaron Gokaslan
Date: Sat, 18 Oct 2025 02:23:22 +0000
Subject: [PATCH] [BE]: Update cudnn frontend submodule to 1.15.0 (#165776)

Update cudnn frontend submodule to 1.15.0

Pull Request resolved: https://github.com/pytorch/pytorch/pull/165776
Approved by: https://github.com/eqy
---
 aten/src/ATen/native/cudnn/MHA.cpp | 8 ++------
 third_party/cudnn_frontend         | 2 +-
 2 files changed, 3 insertions(+), 7 deletions(-)

diff --git a/aten/src/ATen/native/cudnn/MHA.cpp b/aten/src/ATen/native/cudnn/MHA.cpp
index 366fd0ae3c3c..7604244997bc 100644
--- a/aten/src/ATen/native/cudnn/MHA.cpp
+++ b/aten/src/ATen/native/cudnn/MHA.cpp
@@ -487,9 +487,7 @@ std::unique_ptr build_graph(
   auto scaled_dot_product_flash_attention_options =
       fe::graph::SDPA_attributes()
           .set_name("CUDNN_SDPA")
-          .set_is_inference(return_softmaxstats == false)
-          // TODO(eqy): switch to this API once cuDNN FE is upgraded
-          // .set_generate_stats(return_softmaxstats)
+          .set_generate_stats(return_softmaxstats)
           .set_causal_mask(is_causal)
           .set_attn_scale(attn_scale);
   if (use_ragged_in_dense(q, k, v, o, attn_bias.has_value())) {
@@ -707,9 +705,7 @@ std::unique_ptr build_graph_nestedtensor(
   auto scaled_dot_product_flash_attention_options =
       fe::graph::SDPA_attributes()
           .set_name("CUDNN_SDPA_NESTEDTENSOR")
-          .set_is_inference(return_softmaxstats == false)
-          // TODO(eqy): switch to this API once cuDNN FE is upgraded
-          // .set_generate_stats(return_softmaxstats)
+          .set_generate_stats(return_softmaxstats)
           .set_causal_mask(is_causal)
           .set_attn_scale(attn_scale)
           .set_seq_len_q(SEQ_LEN_Q_)
diff --git a/third_party/cudnn_frontend b/third_party/cudnn_frontend
index f937055efc6d..0b1577c8c834 160000
--- a/third_party/cudnn_frontend
+++ b/third_party/cudnn_frontend
@@ -1 +1 @@
-Subproject commit f937055efc6d414d11f4c6577e3977fe74f35fb6
+Subproject commit 0b1577c8c83401237d601d0d0db5210506705396