We read every piece of feedback and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent: 8722104 · commit: 7e88212 — Copy full SHA for 7e88212
tensorrt_llm/_torch/auto_deploy/models/patches/phi.py
@@ -70,9 +70,12 @@ def _patched_phi3_long_emb_init(
     self,
 ):
     _patched_phi3_emb_init(self)
-    self.ext_factors = torch.tensor(
+    ext_factors_tensor = torch.tensor(
         self.short_factor, dtype=torch.float32, device=torch.device("cpu")
     )
+    if hasattr(self, "ext_factors"):
+        delattr(self, "ext_factors")
+    self.register_buffer("ext_factors", ext_factors_tensor, persistent=False)
 
 
 # Copied from https://huggingface.co/microsoft/Phi-3-mini-4k-instruct/blob/main/modeling_phi3.py#L151
0 commit comments