ENH Optimize LoraParallelLinear initialization (#2576)
@@ -74,7 +74,7 @@ class LoraParallelLinear(nn.Module, LoraLayer):
             init_method = megatron_config.init_method
         input_is_parallel = True
         gather_output = False
-        if isinstance(base_layer, self.backend.RowParallelLinear):
+        if self.is_parallel_a:
             input_is_parallel = base_layer.input_is_parallel
         else:
             gather_output = base_layer.gather_output
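The change replaces a repeated isinstance check against the Megatron backend's RowParallelLinear class with the is_parallel_a flag that LoraParallelLinear's constructor already derives from the same check. Below is a minimal, self-contained sketch of that pattern; the Megatron layers are stubbed out and the constructor and update_layer signatures are condensed, so everything other than is_parallel_a, input_is_parallel, and gather_output should be read as an illustrative assumption rather than peft's actual API.

    from types import SimpleNamespace


    # Stand-ins for megatron.core.tensor_parallel layers; only the
    # attributes the sketched logic reads are modeled here.
    class RowParallelLinear:
        def __init__(self, input_is_parallel=True):
            self.input_is_parallel = input_is_parallel


    class ColumnParallelLinear:
        def __init__(self, gather_output=False):
            self.gather_output = gather_output


    backend = SimpleNamespace(
        RowParallelLinear=RowParallelLinear,
        ColumnParallelLinear=ColumnParallelLinear,
    )


    class LoraParallelLinear:
        def __init__(self, base_layer, backend):
            self.base_layer = base_layer
            self.backend = backend
            # Resolve the row/column-parallel distinction once, at
            # construction time, and cache it as a plain boolean.
            self.is_parallel_a = isinstance(base_layer, backend.RowParallelLinear)

        def update_layer(self):
            input_is_parallel = True
            gather_output = False
            # Before #2576 this branch re-ran
            # isinstance(self.base_layer, self.backend.RowParallelLinear)
            # each time an adapter was added; reusing the cached flag avoids
            # the repeated type check and keeps both call sites agreeing on
            # the same decision.
            if self.is_parallel_a:
                input_is_parallel = self.base_layer.input_is_parallel
            else:
                gather_output = self.base_layer.gather_output
            return input_is_parallel, gather_output


    # Row-parallel base layer: input_is_parallel is read from the base layer.
    row = LoraParallelLinear(RowParallelLinear(input_is_parallel=True), backend)
    assert row.update_layer() == (True, False)

    # Column-parallel base layer: gather_output is read instead.
    col = LoraParallelLinear(ColumnParallelLinear(gather_output=True), backend)
    assert col.update_layer() == (True, True)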