Skip to content

Commit 5d662ce

Browse files
authored
Remove second layer norm from phi component mapping (#1059)
* Remove second layer norm from phi component mapping
* Configure prepend_bos to be off by default
1 parent 5464167 commit 5d662ce

File tree

1 file changed

+3
-3
lines changed
  • transformer_lens/model_bridge/supported_architectures/phi.py

1 file changed

+3
-3
lines changed

transformer_lens/model_bridge/supported_architectures/phi.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,8 @@
2121
class PhiArchitectureAdapter(ArchitectureAdapter):
2222
"""Architecture adapter for Phi models."""
2323

24+
default_cfg = {"use_fast": False}
25+
2426
def __init__(self, cfg: Any) -> None:
2527
"""Initialize the Phi architecture adapter.
2628
@@ -29,7 +31,7 @@ def __init__(self, cfg: Any) -> None:
2931
"""
3032
super().__init__(cfg)
3133

32-
self.default_cfg = {"use_fast": False}
34+
self.cfg.default_prepend_bos = False
3335

3436
self.conversion_rules = HookConversionSet(
3537
{
@@ -96,8 +98,6 @@ def __init__(self, cfg: Any) -> None:
9698
"o": LinearBridge(name="dense"),
9799
},
98100
),
99-
# Layer norm 1 and 2 are tied.
100-
"ln2": NormalizationBridge(name="input_layernorm", config=self.cfg),
101101
"mlp": MLPBridge(
102102
name="mlp",
103103
submodules={

0 commit comments

Comments
 (0)