ENH Optimize LoraParallelLinear initialization (#2576)

Author: JavaZero
Date: 2025-06-11 19:48:51 +08:00
Committed by: GitHub
Parent: e67052b18c
Commit: a8b9a6cecc


@@ -74,7 +74,7 @@ class LoraParallelLinear(nn.Module, LoraLayer):
         init_method = megatron_config.init_method
         input_is_parallel = True
         gather_output = False
-        if isinstance(base_layer, self.backend.RowParallelLinear):
+        if self.is_parallel_a:
             input_is_parallel = base_layer.input_is_parallel
         else:
             gather_output = base_layer.gather_output
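
The change replaces the isinstance check against the backend's RowParallelLinear with the self.is_parallel_a flag, so the branch reuses state that was presumably established earlier in the initializer instead of resolving the backend class again. A minimal sketch of the pattern, assuming the flag is computed once from the base layer's type in __init__; only the names visible in the diff are taken from the source, everything else is illustrative and not the actual PEFT implementation:

class LoraParallelLinearSketch:
    """Illustrative sketch only, not the PEFT implementation."""

    def __init__(self, base_layer, backend, megatron_config):
        self.backend = backend
        # Assumption: the flag is computed once, when the adapter layer is
        # built, from the type of the wrapped Megatron layer.
        self.is_parallel_a = isinstance(base_layer, backend.RowParallelLinear)

        init_method = megatron_config.init_method
        input_is_parallel = True
        gather_output = False
        if self.is_parallel_a:
            # Row-parallel base layer: inherit its input-parallel setting.
            input_is_parallel = base_layer.input_is_parallel
        else:
            # Column-parallel base layer: inherit its gather_output setting.
            gather_output = base_layer.gather_output
        # ... the rest of the initialization would use init_method,
        # input_is_parallel, and gather_output to build the LoRA A/B layers.

Reusing the cached flag avoids repeating the backend attribute lookup and isinstance call during initialization, which appears to be the optimization the commit title refers to.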