Commit 8b7d535

fix gptj (#5652)
1 parent 1b387ca commit 8b7d535

File tree

  • colossalai/shardformer/policies/gptj.py

1 file changed: 0 additions, 7 deletions
colossalai/shardformer/policies/gptj.py

Lines changed: 0 additions & 7 deletions
@@ -54,7 +54,6 @@ def module_policy(self):
         if self.shard_config.enable_sequence_parallelism:
             self.shard_config.enable_sequence_parallelism = False
             warnings.warn("GPTJ doesn't support sequence parallelism now, will ignore the sequence parallelism flag.")
-        use_sequence_parallel = self.shard_config.enable_sequence_parallelism
 
         overlap = self.shard_config.enable_sequence_overlap
         if self.shard_config.enable_tensor_parallelism:
@@ -78,40 +77,34 @@ def module_policy(self):
                         suffix="attn.k_proj",
                         target_module=col_nn.Linear1D_Col,
                         kwargs={
-                            "seq_parallel": use_sequence_parallel,
                             "overlap": overlap,
                         },
                     ),
                     SubModuleReplacementDescription(
                         suffix="attn.q_proj",
                         target_module=col_nn.Linear1D_Col,
                         kwargs={
-                            "seq_parallel": use_sequence_parallel,
                             "overlap": overlap,
                         },
                     ),
                     SubModuleReplacementDescription(
                         suffix="attn.v_proj",
                         target_module=col_nn.Linear1D_Col,
                         kwargs={
-                            "seq_parallel": use_sequence_parallel,
                             "overlap": overlap,
                         },
                     ),
                     SubModuleReplacementDescription(
                         suffix="attn.out_proj",
                         target_module=col_nn.Linear1D_Row,
-                        kwargs={"seq_parallel": use_sequence_parallel},
                     ),
                     SubModuleReplacementDescription(
                         suffix="mlp.fc_in",
                         target_module=col_nn.Linear1D_Col,
-                        kwargs={"seq_parallel": use_sequence_parallel},
                     ),
                     SubModuleReplacementDescription(
                         suffix="mlp.fc_out",
                         target_module=col_nn.Linear1D_Row,
-                        kwargs={"seq_parallel": use_sequence_parallel},
                     ),
                     SubModuleReplacementDescription(
                         suffix="attn.attn_dropout",

0 commit comments
