[Bugfix] Fix Shared Expert/Zero expert code in FusedMoE.process_chunk (#25698)

Signed-off-by: Sage Moore <sage@neuralmagic.com>
Co-authored-by: Robert Shaw <114415538+robertgshaw2-redhat@users.noreply.github.com>
Author: Sage Moore
Committed via GitHub: 2025-09-26 01:25:28 -07:00
Parent: 19f76ee68e
Commit: dfb9af2014


@@ -1925,7 +1925,9 @@ class FusedMoE(CustomOp):
-        assert self.shared_experts is None or isinstance(
-            final_hidden_states, tuple)
-        if isinstance(final_hidden_states, tuple):
+        if self.zero_expert_num is not None and self.zero_expert_num > 0:
+            assert isinstance(final_hidden_states, tuple)
+            assert self.shared_experts is None
             final_hidden_states, zero_expert_result = final_hidden_states
             if zero_expert_result is not None:
                 final_hidden_states += zero_expert_result
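
For readers outside the vLLM codebase, here is a minimal standalone sketch of the corrected branch, assuming the chunked forward returns either a plain tensor or a (routed_output, zero_expert_result) tuple; the function name handle_chunk_output and its parameters are illustrative, not vLLM's API:

# Minimal sketch of the corrected tuple handling (illustrative names, not
# vLLM's actual process_chunk). The tuple is only unpacked when zero experts
# are configured, and that path asserts shared experts are disabled.
from typing import Optional, Tuple, Union

import torch


def handle_chunk_output(
    final_hidden_states: Union[torch.Tensor,
                               Tuple[torch.Tensor, Optional[torch.Tensor]]],
    zero_expert_num: Optional[int],
    shared_experts: Optional[torch.nn.Module],
) -> torch.Tensor:
    if zero_expert_num is not None and zero_expert_num > 0:
        # Zero-expert path: the chunked forward returns
        # (routed_output, zero_expert_result).
        assert isinstance(final_hidden_states, tuple)
        assert shared_experts is None
        final_hidden_states, zero_expert_result = final_hidden_states
        if zero_expert_result is not None:
            final_hidden_states = final_hidden_states + zero_expert_result
    return final_hidden_states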