Skip to content

Commit a5307f5

Browse files
committed
py : fix position embeddings chop [no ci]
1 parent fbbb64f commit a5307f5

File tree

1 file changed

+5
-5
lines changed

1 file changed

+5
-5
lines changed

convert_hf_to_gguf.py

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -2701,16 +2701,16 @@ def set_vocab(self):
27012701
self.gguf_writer.add_add_eos_token(True)
27022702

27032703
def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]:
2704-
# position embeddings start at pad_token_id + 1, so just chop down the weight tensor
2705-
if name == "embeddings.position_embeddings.weight":
2706-
if self._position_offset is not None:
2707-
data_torch = data_torch[self._position_offset:,:]
2708-
27092704
# if name starts with "roberta.", remove the prefix
27102705
# e.g. https://huggingface.co/BAAI/bge-reranker-v2-m3/tree/main
27112706
if name.startswith("roberta."):
27122707
name = name[8:]
27132708

2709+
# position embeddings start at pad_token_id + 1, so just chop down the weight tensor
2710+
if name == "embeddings.position_embeddings.weight":
2711+
if self._position_offset is not None:
2712+
data_torch = data_torch[self._position_offset:,:]
2713+
27142714
return super().modify_tensors(data_torch, name, bid)
27152715

27162716

0 commit comments

Comments (0)