Skip to content

Commit 5fc5194

Browse files
committed
update meta_weights
1 parent ded7eea commit 5fc5194

File tree

3 files changed: +2 additions, -4 deletions

lightllm/common/basemodel/basemodel.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -65,8 +65,6 @@ def __init__(self, kvargs):
6565
self.quant_type = kvargs.get("quant_type", None)
6666
self.quant_cfg_path = kvargs.get("quant_cfg", None)
6767
self.mem_fraction = kvargs.get("mem_fraction", 0.9)
68-
self.disable_qk_absorb = kvargs.get("disable_qk_absorb", False)
69-
self.disable_vo_absorb = kvargs.get("disable_vo_absorb", False)
7068

7169
self._init_datatype()
7270
self._init_config()

lightllm/common/basemodel/layer_weights/meta_weights/mm_weight.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ def load_hf_weights(self, weights):
8686
self.weight = weight[:, start:end]
8787
if self.bias_name in weights:
8888
bias = weights[self.bias_name].to(self.data_type_)
89-
self.bias = bias.cuda(self.tp_rank_) / self.world_size_
89+
self.bias = (bias / self.world_size_).cuda(self.tp_rank_)
9090
if weight is None:
9191
return
9292
self._post_load_weights()

lightllm/common/basemodel/layer_weights/meta_weights/norm_weight.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ def __init__(self, weight_name, data_type, bias_name=None):
3232

3333
def load_hf_weights(self, weights):
3434
if self.weight_name in weights:
35-
self.weight = weights[self.weight_name].to(self.data_type_).cuda(self.tp_rank_) + 1
35+
self.weight = (weights[self.weight_name].to(self.data_type_) + 1).cuda(self.tp_rank_)
3636

3737

3838
class TpNormWeight(NormWeight):

0 commit comments

Comments (0)