
Commit 27d7865

fix circular dependencies when using Lora (bigscience-workshop#284)
1 parent b7529f3 commit 27d7865

File tree: 1 file changed (+2, −2 lines)


megatron/core/tensor_parallel/layers.py

Lines changed: 2 additions & 2 deletions
@@ -503,7 +503,7 @@ def __init__(self, input_size, output_size, *,
                  skip_bias_add=False,
                  skip_weight_param_allocation: bool=False,
                  moe=False, enable_expert_tensor_parallelism=False):
-        super(ColumnParallelLinear, self).__init__()
+        torch.nn.Module.__init__(self)
 
         # Keep input parameters
         self.input_size = input_size
@@ -691,7 +691,7 @@ def __init__(self, input_size: int, output_size: int, *,
                  keep_master_weight_for_test: bool = False,
                  skip_bias_add: bool = False,
                  moe=False, enable_expert_tensor_parallelism=False):
-        super(RowParallelLinear, self).__init__()
+        torch.nn.Module.__init__(self)
 
         # Keep input parameters
        self.input_size = input_size
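The sketch below is a minimal, hypothetical illustration (not taken from the commit; the _Original and _LoraWrappedLinear names are invented) of one way `super(ColumnParallelLinear, self).__init__()` can recurse: a LoRA-style patcher subclasses the layer and rebinds the original module-level name to the wrapper, so the name lookup inside `__init__` resolves to the wrapper's MRO and lands back on the same `__init__`. Calling `torch.nn.Module.__init__(self)` directly, as this commit does, bypasses that lookup.

import torch


class ColumnParallelLinear(torch.nn.Module):
    def __init__(self):
        # The name ColumnParallelLinear is resolved at call time. If a wrapper
        # library has rebound it to a subclass of this class, super() walks the
        # subclass's MRO and re-enters this very __init__.
        super(ColumnParallelLinear, self).__init__()   # recurses once patched
        # torch.nn.Module.__init__(self)               # the commit's fix: no name lookup, no MRO walk


_Original = ColumnParallelLinear


class _LoraWrappedLinear(_Original):
    def __init__(self):
        _Original.__init__(self)   # delegate to the wrapped layer's __init__


# A monkey-patching wrapper might re-register itself under the public name.
ColumnParallelLinear = _LoraWrappedLinear

try:
    ColumnParallelLinear()   # RecursionError with super(...); works with the fix
except RecursionError:
    print("circular __init__ chain")

With `torch.nn.Module.__init__(self)` the base initializer is named explicitly, so the rebound class name never enters the call and the wrapped layer initializes exactly once.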

0 commit comments