Skip to content

Commit c94ac5e

Browse files
authored
fix
1 parent b1c3a2e commit c94ac5e

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

lightllm/models/qwen2/layer_weights/transformer_layer_weight.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ def _parse_config(self):
2121
self.tp_o_head_num_ = self.tp_q_head_num_
2222
head_dim = self.network_config_["hidden_size"] // self.network_config_["num_attention_heads"]
2323
self.head_dim = self.network_config_.get("head_dim", head_dim)
24-
assert self.tp_k_head_num_ * self.tp_world_size_ % self.network_config_["num_key_value_heads"] == 0
24+
assert (self.tp_k_head_num_ * self.tp_world_size_) % self.network_config_["num_key_value_heads"] == 0
2525

2626
def _repeat_weight(self, name, weights):
2727
# for tp_world_size_ > num_key_value_heads
@@ -30,7 +30,7 @@ def _repeat_weight(self, name, weights):
3030

3131
tensor = weights[name]
3232
num_kv_heads = self.network_config_["num_key_value_heads"]
33-
repeat_size = self.tp_k_head_num_ * self.tp_world_size_ // num_kv_heads
33+
repeat_size = (self.tp_k_head_num_ * self.tp_world_size_) // num_kv_heads
3434

3535
if tensor.ndim == 1:
3636
# Bias (1D tensor)

0 commit comments

Comments (0)