Commit 58a35be

Merge pull request #299 from menloresearch/update-dev-from-master-2025-10-21-00-34
Sync master with upstream release b6811
2 parents 4675215 + 6de8ed7, commit 58a35be

53 files changed: +2557 −330 lines

README.md

Lines changed: 2 additions & 0 deletions
@@ -138,6 +138,7 @@ Instructions for adding support for new models: [HOWTO-add-model.md](docs/develo
 - [x] [Ling models](https://huggingface.co/collections/inclusionAI/ling-67c51c85b34a7ea0aba94c32)
 - [x] [LFM2 models](https://huggingface.co/collections/LiquidAI/lfm2-686d721927015b2ad73eaa38)
 - [x] [Hunyuan models](https://huggingface.co/collections/tencent/hunyuan-dense-model-6890632cda26b19119c9c5e7)
+- [x] [BailingMoeV2 (Ring/Ling 2.0) models](https://huggingface.co/collections/inclusionAI/ling-v2-68bf1dd2fc34c306c1fa6f86)

 #### Multimodal

@@ -187,6 +188,7 @@ Instructions for adding support for new models: [HOWTO-add-model.md](docs/develo
 - Swift [srgtuszy/llama-cpp-swift](https://github.com/srgtuszy/llama-cpp-swift)
 - Swift [ShenghaiWang/SwiftLlama](https://github.com/ShenghaiWang/SwiftLlama)
 - Delphi [Embarcadero/llama-cpp-delphi](https://github.com/Embarcadero/llama-cpp-delphi)
+- Go (no CGo needed): [hybridgroup/yzma](https://github.com/hybridgroup/yzma)

 </details>

convert_hf_to_gguf.py

Lines changed: 99 additions & 2 deletions
@@ -892,8 +892,8 @@ def get_vocab_base_pre(self, tokenizer) -> str:
             # ref: https://huggingface.co/JetBrains/Mellum-4b-base
             res = "mellum"
         if chkhsh == "9b1be57e70d20d9501b2b3186e792d81181ae36ada3903c26f9fea418cf87206":
-            # ref: https://huggingface.co/inclusionAI/LLaDA-MoE-7B-A1B-Base
-            res = "llada-moe"
+            # ref: https://huggingface.co/inclusionAI/Ling-mini-base-2.0
+            res = "bailingmoe2"
         if chkhsh == "53e325976a6e142379c19b09afcae354f2f496f147afa8f9e189a33fe4e3024e":
             # ref: https://huggingface.co/ibm-granite/granite-docling-258M
             res = "granite-docling"
@@ -8055,6 +8055,103 @@ def prepare_tensors(self):
                 raise ValueError(f"Unprocessed experts: {experts}")


+@ModelBase.register("BailingMoeV2ForCausalLM")
+class BailingMoeV2Model(TextModel):
+    model_arch = gguf.MODEL_ARCH.BAILINGMOE2
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if nextn_layers := self.hparams.get("num_nextn_predict_layers", 0):
+            self.block_count = self.hparams["num_hidden_layers"] + nextn_layers
+            self.tensor_map = gguf.get_tensor_name_map(self.model_arch, self.block_count)
+
+    def set_vocab(self):
+        self._set_vocab_gpt2()
+
+    def set_gguf_parameters(self):
+        super().set_gguf_parameters()
+        hparams = self.hparams
+        if (rope_dim := hparams.get("head_dim")) is None:
+            rope_dim = hparams["hidden_size"] // hparams["num_attention_heads"]
+
+        self.gguf_writer.add_rope_dimension_count(int(rope_dim * self.hparams.get("partial_rotary_factor", 0.5)))
+        rope_scaling = self.hparams.get("rope_scaling") or {}
+        if rope_scaling.get("rope_type", rope_scaling.get("type")) == "yarn" and "factor" in rope_scaling:
+            self.gguf_writer.add_rope_scaling_type(gguf.RopeScalingType.YARN)
+            self.gguf_writer.add_rope_scaling_factor(rope_scaling["factor"])
+            self.gguf_writer.add_rope_scaling_orig_ctx_len(rope_scaling["original_max_position_embeddings"])
+        else:
+            self.gguf_writer.add_rope_scaling_type(gguf.RopeScalingType.NONE)
+        self.gguf_writer.add_leading_dense_block_count(hparams["first_k_dense_replace"])
+        self.gguf_writer.add_vocab_size(hparams["vocab_size"])
+        self.gguf_writer.add_expert_feed_forward_length(hparams["moe_intermediate_size"])
+        self.gguf_writer.add_expert_shared_feed_forward_length(hparams.get("moe_shared_expert_intermediate_size", hparams["moe_intermediate_size"] * hparams["num_shared_experts"]))
+        self.gguf_writer.add_expert_weights_scale(hparams["routed_scaling_factor"])
+        self.gguf_writer.add_expert_count(hparams["num_experts"])
+        self.gguf_writer.add_expert_shared_count(hparams["num_shared_experts"])
+        self.gguf_writer.add_expert_group_count(hparams["n_group"])
+        self.gguf_writer.add_expert_group_used_count(hparams["topk_group"])
+        self.gguf_writer.add_expert_weights_norm(hparams["norm_topk_prob"])
+
+        if hparams["score_function"] == "sigmoid":
+            self.gguf_writer.add_expert_gating_func(gguf.ExpertGatingFuncType.SIGMOID)
+        elif hparams["score_function"] == "softmax":
+            self.gguf_writer.add_expert_gating_func(gguf.ExpertGatingFuncType.SOFTMAX)
+        else:
+            raise ValueError(f"Unsupported score_function value: {hparams['score_function']}")
+
+        if (nextn_layers := self.hparams.get("num_nextn_predict_layers")) is not None:
+            self.gguf_writer.add_nextn_predict_layers(nextn_layers)
+
+    _experts: list[dict[str, Tensor]] | None = None
+
+    def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]:
+        if "mlp.experts" in name:
+            n_experts = self.hparams["num_experts"]
+            assert bid is not None
+
+            tensors: list[tuple[str, Tensor]] = []
+
+            if self._experts is None:
+                self._experts = [{} for _ in range(self.block_count)]
+
+            self._experts[bid][name] = data_torch
+
+            if len(self._experts[bid]) >= n_experts * 3:
+                # merge the experts into a single 3d tensor
+                for w_name in ["down_proj", "gate_proj", "up_proj"]:
+                    datas: list[Tensor] = []
+
+                    for xid in range(n_experts):
+                        ename = f"model.layers.{bid}.mlp.experts.{xid}.{w_name}.weight"
+                        datas.append(self._experts[bid][ename])
+                        del self._experts[bid][ename]
+
+                    data_torch = torch.stack(datas, dim=0)
+
+                    merged_name = f"model.layers.{bid}.mlp.experts.{w_name}.weight"
+
+                    new_name = self.map_tensor_name(merged_name)
+
+                    tensors.append((new_name, data_torch))
+
+            return tensors
+
+        if name.endswith(".expert_bias"):
+            name = name.replace(".expert_bias", ".expert_bias.bias")
+
+        return [(self.map_tensor_name(name), data_torch)]
+
+    def prepare_tensors(self):
+        super().prepare_tensors()
+
+        if self._experts is not None:
+            # flatten `list[dict[str, Tensor]]` into `list[str]`
+            experts = [k for d in self._experts for k in d.keys()]
+            if len(experts) > 0:
+                raise ValueError(f"Unprocessed experts: {experts}")
+
+
 @ModelBase.register("GroveMoeForCausalLM", "modeling_grove_moe.GroveMoeForCausalLM")
 class GroveMoeModel(TextModel):
     model_arch = gguf.MODEL_ARCH.GROVEMOE
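
The modify_tensors hook above follows the converter's usual MoE merge pattern: per-expert down_proj/gate_proj/up_proj matrices are buffered per layer until all num_experts * 3 tensors have arrived, then each projection's n_experts 2-D weights are stacked into a single 3-D tensor. A minimal standalone sketch of that stacking step (shapes, bid, and expert counts here are illustrative assumptions, not values from a real checkpoint):

import torch

n_experts, in_dim, out_dim, bid = 4, 8, 16, 0  # illustrative sizes only

# stand-in for self._experts[bid]: one buffered 2-D weight per expert per projection
buffered = {
    f"model.layers.{bid}.mlp.experts.{xid}.{w_name}.weight": torch.randn(out_dim, in_dim)
    for xid in range(n_experts)
    for w_name in ("down_proj", "gate_proj", "up_proj")
}

merged = {}
for w_name in ("down_proj", "gate_proj", "up_proj"):
    # gather the n_experts matrices in expert order, consuming them as we go
    datas = [buffered.pop(f"model.layers.{bid}.mlp.experts.{xid}.{w_name}.weight")
             for xid in range(n_experts)]
    # stack into one [n_experts, out_dim, in_dim] tensor, as modify_tensors does
    merged[f"model.layers.{bid}.mlp.experts.{w_name}.weight"] = torch.stack(datas, dim=0)

assert merged[f"model.layers.{bid}.mlp.experts.down_proj.weight"].shape == (n_experts, out_dim, in_dim)
assert not buffered  # everything consumed, mirroring the "Unprocessed experts" check

Consuming keys as they are merged is what lets prepare_tensors assert afterwards that no expert tensor was left over.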

convert_hf_to_gguf_update.py

Lines changed: 1 addition & 1 deletion
@@ -139,7 +139,7 @@ class TOKENIZER_TYPE(IntEnum):
     {"name": "lfm2", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/LiquidAI/LFM2-Tokenizer"},
     {"name": "exaone4", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/LGAI-EXAONE/EXAONE-4.0-32B", },
     {"name": "mellum", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/JetBrains/Mellum-4b-base", },
-    {"name": "llada-moe", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/inclusionAI/LLaDA-MoE-7B-A1B-Base", },
+    {"name": "bailingmoe2", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/inclusionAI/Ling-mini-base-2.0", },
     {"name": "granite-docling", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/ibm-granite/granite-docling-258M", },
 ]

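The chkhsh constants swapped in convert_hf_to_gguf.py above identify a tokenizer by how it encodes a fixed probe text. A hedged sketch of the idea (the probe string below is a placeholder, so its digest will not reproduce the constants; the real script defines its own probe):

from hashlib import sha256
from transformers import AutoTokenizer

# Placeholder probe text: this only demonstrates the mechanism, not the
# actual digest values hard-coded in get_vocab_base_pre.
chktxt = "placeholder probe with mixed scripts, digits 1234 and punctuation!?"
tokenizer = AutoTokenizer.from_pretrained("inclusionAI/Ling-mini-base-2.0")
chkhsh = sha256(str(tokenizer.encode(chktxt)).encode()).hexdigest()
print(chkhsh)  # compared against the hex strings listed in get_vocab_base_pre
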
docs/ops.md

Lines changed: 5 additions & 5 deletions
@@ -22,7 +22,7 @@ Legend:
 | ARANGE ||||||||||
 | ARGMAX ||||||||||
 | ARGSORT ||||||||||
-| CEIL ||||||| |||
+| CEIL ||||||| |||
 | CLAMP ||||| 🟡 | 🟡 || 🟡 ||
 | CONCAT |||| 🟡 || 🟡 | 🟡 |||
 | CONT || 🟡 |||| 🟡 | 🟡 | 🟡 ||
@@ -42,7 +42,7 @@ Legend:
 | ELU |||| 🟡 | 🟡 || 🟡 |||
 | EXP |||| 🟡 | 🟡 || 🟡 |||
 | FLASH_ATTN_EXT || 🟡 || 🟡 | 🟡 ||| 🟡 ||
-| FLOOR ||||||| |||
+| FLOOR ||||||| |||
 | GATED_LINEAR_ATTN ||||||||||
 | GEGLU ||||| 🟡 ||| 🟡 ||
 | GEGLU_ERF ||||| 🟡 ||| 🟡 ||
@@ -72,7 +72,7 @@ Legend:
 | OPT_STEP_SGD ||||||||||
 | OUT_PROD | 🟡 || 🟡 | 🟡 ||| 🟡 |||
 | PAD ||||||| 🟡 |||
-| PAD_REFLECT_1D ||||||| |||
+| PAD_REFLECT_1D ||||||| |||
 | POOL_2D || 🟡 ||||||||
 | REGLU ||||| 🟡 ||| 🟡 ||
 | RELU |||| 🟡 | 🟡 | 🟡 | 🟡 | 🟡 ||
@@ -84,7 +84,7 @@ Legend:
 | ROLL ||||||||||
 | ROPE || 🟡 ||||||||
 | ROPE_BACK ||||||||||
-| ROUND ||||||| |||
+| ROUND ||||||| |||
 | RWKV_WKV6 ||||||||||
 | RWKV_WKV7 ||||||||||
 | SCALE || 🟡 ||||||||
@@ -111,6 +111,6 @@ Legend:
 | TANH |||| 🟡 | 🟡 || 🟡 | 🟡 ||
 | TIMESTEP_EMBEDDING ||||||||||
 | TOPK_MOE ||||||||||
-| TRUNC ||||||| |||
+| TRUNC ||||||| |||
 | UPSCALE || 🟡 ||| 🟡 || 🟡 |||
 | XIELU ||||||||||
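
This support matrix appears to be kept in lockstep with the per-backend CSVs such as docs/ops/SYCL.csv and docs/ops/Vulkan.csv below: an op's cell reflects whether all, some, or none of its recorded test cases are supported on that backend. A rough standalone aggregation sketch, not the project's actual tooling, with the column layout guessed from the rows in this diff:

import csv
from collections import defaultdict

def support_cells(csv_path: str) -> dict[str, str]:
    # op name -> per-test-case support flags for one backend
    # assumed columns (guessed from rows in this diff):
    # backend, op, params, mode, supported_flag, ..., backend_name
    results: dict[str, list[bool]] = defaultdict(list)
    with open(csv_path, newline="") as f:
        for row in csv.reader(f):
            if len(row) >= 5 and row[3] == "support":
                results[row[1]].append(row[4] == "1")
    return {op: "all" if all(flags) else ("partial" if any(flags) else "none")
            for op, flags in results.items()}

# e.g. support_cells("docs/ops/SYCL.csv").get("FLOOR")  # "all" after this commit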

docs/ops/SYCL.csv

Lines changed: 18 additions & 2 deletions
@@ -31,6 +31,14 @@
 "SYCL0","GELU_ERF","type=f16,ne_a=[5,7,11,13],v=0","support","1","yes","SYCL"
 "SYCL0","XIELU","type=f16,ne_a=[128,2,2,2],v=0","support","0","no","SYCL"
 "SYCL0","XIELU","type=f16,ne_a=[5,7,11,13],v=0","support","0","no","SYCL"
+"SYCL0","FLOOR","type=f16,ne_a=[128,2,2,2],v=0","support","1","yes","SYCL"
+"SYCL0","FLOOR","type=f16,ne_a=[5,7,11,13],v=0","support","1","yes","SYCL"
+"SYCL0","CEIL","type=f16,ne_a=[128,2,2,2],v=0","support","1","yes","SYCL"
+"SYCL0","CEIL","type=f16,ne_a=[5,7,11,13],v=0","support","1","yes","SYCL"
+"SYCL0","ROUND","type=f16,ne_a=[128,2,2,2],v=0","support","1","yes","SYCL"
+"SYCL0","ROUND","type=f16,ne_a=[5,7,11,13],v=0","support","1","yes","SYCL"
+"SYCL0","TRUNC","type=f16,ne_a=[128,2,2,2],v=0","support","1","yes","SYCL"
+"SYCL0","TRUNC","type=f16,ne_a=[5,7,11,13],v=0","support","1","yes","SYCL"
 "SYCL0","ABS","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","SYCL"
 "SYCL0","ABS","type=f16,ne_a=[5,7,11,13],v=1","support","0","no","SYCL"
 "SYCL0","SGN","type=f16,ne_a=[128,2,2,2],v=1","support","0","no","SYCL"
@@ -95,6 +103,14 @@
 "SYCL0","GELU_ERF","type=f32,ne_a=[5,7,11,13],v=0","support","1","yes","SYCL"
 "SYCL0","XIELU","type=f32,ne_a=[128,2,2,2],v=0","support","0","no","SYCL"
 "SYCL0","XIELU","type=f32,ne_a=[5,7,11,13],v=0","support","0","no","SYCL"
+"SYCL0","FLOOR","type=f32,ne_a=[128,2,2,2],v=0","support","1","yes","SYCL"
+"SYCL0","FLOOR","type=f32,ne_a=[5,7,11,13],v=0","support","1","yes","SYCL"
+"SYCL0","CEIL","type=f32,ne_a=[128,2,2,2],v=0","support","1","yes","SYCL"
+"SYCL0","CEIL","type=f32,ne_a=[5,7,11,13],v=0","support","1","yes","SYCL"
+"SYCL0","ROUND","type=f32,ne_a=[128,2,2,2],v=0","support","1","yes","SYCL"
+"SYCL0","ROUND","type=f32,ne_a=[5,7,11,13],v=0","support","1","yes","SYCL"
+"SYCL0","TRUNC","type=f32,ne_a=[128,2,2,2],v=0","support","1","yes","SYCL"
+"SYCL0","TRUNC","type=f32,ne_a=[5,7,11,13],v=0","support","1","yes","SYCL"
 "SYCL0","ABS","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","SYCL"
 "SYCL0","ABS","type=f32,ne_a=[5,7,11,13],v=1","support","0","no","SYCL"
 "SYCL0","SGN","type=f32,ne_a=[128,2,2,2],v=1","support","0","no","SYCL"
@@ -9363,8 +9379,8 @@
 "SYCL0","ACC","type=f32,ne_a=[256,17,1,1],ne_b=[256,16,1,1]","support","1","yes","SYCL"
 "SYCL0","PAD","type=f32,ne_a=[512,512,1,1],pad_0=1,pad_1=1","support","1","yes","SYCL"
 "SYCL0","PAD","type=f32,ne_a=[512,512,3,1],lp0=1,rp0=1,lp1=1,rp1=1,lp2=1,rp2=1,lp3=1,rp3=1,v=0","support","1","yes","SYCL"
-"SYCL0","PAD_REFLECT_1D","type=f32,ne_a=[512,34,2,1],pad_0=10,pad_1=9","support","0","no","SYCL"
-"SYCL0","PAD_REFLECT_1D","type=f32,ne_a=[3000,384,4,1],pad_0=10,pad_1=9","support","0","no","SYCL"
+"SYCL0","PAD_REFLECT_1D","type=f32,ne_a=[3000,384,4,1],pad_0=10,pad_1=9","support","0","yes","SYCL"
+"SYCL0","PAD_REFLECT_1D","type=f32,ne_a=[512,34,2,1],pad_0=10,pad_1=9","support","0","yes","SYCL"
 "SYCL0","ROLL","shift0=3,shift1=-2,shift3=1,shift4=-1","support","0","no","SYCL"
 "SYCL0","ARANGE","type=f32,start=0.000000,stop=10.000000,step=1.000000","support","0","no","SYCL"
 "SYCL0","TIMESTEP_EMBEDDING","type=f32,ne_a=[2,1,1,1],dim=320,max_period=10000","support","1","yes","SYCL"

docs/ops/Vulkan.csv

Lines changed: 21 additions & 21 deletions
@@ -3263,27 +3263,27 @@
 "Vulkan0","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=1.000000,broadcast=0","support","1","yes","Vulkan"
 "Vulkan0","RMS_NORM_MUL_ADD","type=f32,ne=[64,5,4,3],eps=1.000000,broadcast=1","support","1","yes","Vulkan"
 "Vulkan0","L2_NORM","type=f32,ne=[64,5,4,3]","support","1","yes","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1024,1,1],ne_b=[3,1024,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[8,1024,1,1],ne_b=[3,1024,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1024,4,1],ne_b=[3,1024,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1536,1,1],ne_b=[3,1536,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[8,1536,1,1],ne_b=[3,1536,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1536,4,1],ne_b=[3,1536,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,2048,1,1],ne_b=[3,2048,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[8,2048,1,1],ne_b=[3,2048,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,2048,4,1],ne_b=[3,2048,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1024,1,1],ne_b=[4,1024,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[8,1024,1,1],ne_b=[4,1024,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1024,4,1],ne_b=[4,1024,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1536,1,1],ne_b=[4,1536,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[8,1536,1,1],ne_b=[4,1536,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1536,4,1],ne_b=[4,1536,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,2048,1,1],ne_b=[4,2048,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[8,2048,1,1],ne_b=[4,2048,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_CONV","type=f32,ne_a=[4,2048,4,1],ne_b=[4,2048,1,1]","support","0","no","Vulkan"
-"Vulkan0","SSM_SCAN","type=f32,d_state=16,head_dim=1,n_head=1024,n_group=1,n_seq_tokens=32,n_seqs=4","support","0","no","Vulkan"
-"Vulkan0","SSM_SCAN","type=f32,d_state=128,head_dim=64,n_head=16,n_group=2,n_seq_tokens=32,n_seqs=4","support","0","no","Vulkan"
-"Vulkan0","SSM_SCAN","type=f32,d_state=256,head_dim=64,n_head=8,n_group=2,n_seq_tokens=32,n_seqs=4","support","0","no","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1024,1,1],ne_b=[3,1024,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[8,1024,1,1],ne_b=[3,1024,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1024,4,1],ne_b=[3,1024,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1536,1,1],ne_b=[3,1536,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[8,1536,1,1],ne_b=[3,1536,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1536,4,1],ne_b=[3,1536,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,2048,1,1],ne_b=[3,2048,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[8,2048,1,1],ne_b=[3,2048,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,2048,4,1],ne_b=[3,2048,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1024,1,1],ne_b=[4,1024,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[8,1024,1,1],ne_b=[4,1024,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1024,4,1],ne_b=[4,1024,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1536,1,1],ne_b=[4,1536,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[8,1536,1,1],ne_b=[4,1536,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,1536,4,1],ne_b=[4,1536,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,2048,1,1],ne_b=[4,2048,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[8,2048,1,1],ne_b=[4,2048,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_CONV","type=f32,ne_a=[4,2048,4,1],ne_b=[4,2048,1,1]","support","1","yes","Vulkan"
+"Vulkan0","SSM_SCAN","type=f32,d_state=16,head_dim=1,n_head=1024,n_group=1,n_seq_tokens=32,n_seqs=4","support","1","yes","Vulkan"
+"Vulkan0","SSM_SCAN","type=f32,d_state=128,head_dim=64,n_head=16,n_group=2,n_seq_tokens=32,n_seqs=4","support","1","yes","Vulkan"
+"Vulkan0","SSM_SCAN","type=f32,d_state=256,head_dim=64,n_head=8,n_group=2,n_seq_tokens=32,n_seqs=4","support","1","yes","Vulkan"
 "Vulkan0","RWKV_WKV6","type=f32,head_count=32,head_size=64,n_seq_tokens=1,n_seqs=1","support","1","yes","Vulkan"
 "Vulkan0","RWKV_WKV6","type=f32,head_count=32,head_size=64,n_seq_tokens=32,n_seqs=1","support","1","yes","Vulkan"
 "Vulkan0","RWKV_WKV6","type=f32,head_count=32,head_size=64,n_seq_tokens=32,n_seqs=4","support","1","yes","Vulkan"
