
Commit 5ad199d

fix rebase

Signed-off-by: Jieming Zhang <jiemingz@nvidia.com>

1 parent: dcb0aab

File tree

1 file changed (+1 line, -1 line)


megatron/core/transformer/transformer_layer.py (1 addition, 1 deletion)

@@ -591,7 +591,7 @@ def _forward_pre_mlp_layernorm(self, hidden_states):
         pre_mlp_layernorm_output = self.pre_mlp_layernorm(hidden_states)
         return pre_mlp_layernorm_output
 
-    def _forward_post_mlp(self, mlp_output_with_bias, residual, using_fused_tp_inference_kernel):
+    def _forward_post_mlp(self, mlp_output_with_bias, residual, using_fused_tp_inference_kernel=False):
         # TODO: could we move `bias_dropout_add_exec_handler` itself
         # inside the module provided in the `bias_dropout_add_spec` module?
         nvtx_range_push(suffix="mlp_bda")
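For context, the one-line change gives `using_fused_tp_inference_kernel` a default of False, so call sites that predate the rebase and pass only `mlp_output_with_bias` and `residual` keep working, while the fused tensor-parallel inference path stays opt-in. Below is a minimal sketch of that idea; the `Layer` class and its call sites are hypothetical stand-ins for illustration, not Megatron-LM's actual TransformerLayer code.

    # Hypothetical sketch: why the default argument keeps old callers working.
    class Layer:
        def _forward_post_mlp(self, mlp_output_with_bias, residual,
                              using_fused_tp_inference_kernel=False):
            # The default makes the flag optional: call sites that never
            # pass it still resolve, and the fused path is strictly opt-in.
            if using_fused_tp_inference_kernel:
                return ("fused", mlp_output_with_bias, residual)
            return ("unfused", mlp_output_with_bias, residual)

    layer = Layer()
    # Pre-rebase call site (two positional args) works thanks to the default.
    print(layer._forward_post_mlp("mlp_out", "res"))
    # Newer call sites opt into the fused kernel explicitly.
    print(layer._forward_post_mlp("mlp_out", "res", using_fused_tp_inference_kernel=True))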
