Skip to content

Commit c0eb4be

Browse files
support z image (#213)
* support z image * Update diffsynth_engine/configs/pipeline.py Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --------- Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
1 parent 4215ce4 commit c0eb4be

File tree

22 files changed

+910427
-3
lines changed

22 files changed

+910427
-3
lines changed

diffsynth_engine/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,13 @@
     WanSpeech2VideoPipelineConfig,
     QwenImagePipelineConfig,
     HunyuanPipelineConfig,
+    ZImagePipelineConfig,
     SDStateDicts,
     SDXLStateDicts,
     FluxStateDicts,
     WanStateDicts,
     QwenImageStateDicts,
+    ZImageStateDicts,
     AttnImpl,
     SpargeAttentionParams,
     VideoSparseAttentionParams,
@@ -55,11 +57,13 @@
     "WanSpeech2VideoPipelineConfig",
     "QwenImagePipelineConfig",
     "HunyuanPipelineConfig",
+    "ZImagePipelineConfig",
     "SDStateDicts",
     "SDXLStateDicts",
     "FluxStateDicts",
     "WanStateDicts",
     "QwenImageStateDicts",
+    "ZImageStateDicts",
     "AttnImpl",
     "SpargeAttentionParams",
     "VideoSparseAttentionParams",
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
+{
+  "architectures": [
+    "Qwen3ForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 2560,
+  "initializer_range": 0.02,
+  "intermediate_size": 9728,
+  "max_position_embeddings": 40960,
+  "max_window_layers": 36,
+  "model_type": "qwen3",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 36,
+  "num_key_value_heads": 8,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000,
+  "sliding_window": null,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.51.0",
+  "use_cache": true,
+  "use_sliding_window": false,
+  "vocab_size": 151936
+}

0 commit comments

Comments (0)