Skip to content

Commit d37cf7e

Browse files
committed
Fix regressions
1 parent b28300c commit d37cf7e

File tree

2 files changed

+3
-2
lines changed

2 files changed

+3
-2
lines changed

examples/chat.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -262,7 +262,7 @@ def get_tokenized_context(max_len):
     # Stop conditions

     sc = prompt_format.stop_conditions(tokenizer)
-    sc = [x for x in sc if x]
+    sc = [x for x in sc if x is not None]
     generator.set_stop_conditions(sc)

     # ANSI color codes

exllamav2/architecture.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -401,6 +401,7 @@ class Params:
         "mlp_up": ".mlp.c_fc",
         "mlp_down": ".mlp.c_proj",
         "lm_head": "model.embed_tokens",
+        "norm_eps": "layer_norm_epsilon",
     })
     self.lm.mlp_act_func = "gelu"
     self.lm.norm = "layernorm"
@@ -471,9 +472,9 @@ class Params:
         "mlp_up": ".block_sparse_moe.experts.*.v1",
         "mlp_down": ".block_sparse_moe.experts.*.w2",
         "mlp_expert_gate": ".block_sparse_moe.gate",
-        "lm_head": "model.embed_tokens",
         "fused_qkv": ".self_attn.Wqkv",
     })
+    self.lm.norm = "layernorm"
     self.lm.is_moe = True

     # Phi3

0 commit comments

Comments
 (0)