@@ -47,6 +47,8 @@ public class LLMTypeRegistry: ModelTypeRegistry, @unchecked Sendable {
4747 " internlm2 " : create ( InternLM2Configuration . self, InternLM2Model . init) ,
4848 " deepseek_v3 " : create ( DeepseekV3Configuration . self, DeepseekV3Model . init) ,
4949 " granite " : create ( GraniteConfiguration . self, GraniteModel . init) ,
50+ " granitemoehybrid " : create (
51+ GraniteMoeHybridConfiguration . self, GraniteMoeHybridModel . init) ,
5052 " mimo " : create ( MiMoConfiguration . self, MiMoModel . init) ,
5153 " glm4 " : create ( GLM4Configuration . self, GLM4Model . init) ,
5254 " acereason " : create ( Qwen2Configuration . self, Qwen2Model . init) ,
@@ -321,6 +323,11 @@ public class LLMRegistry: AbstractModelRegistry, @unchecked Sendable {
         defaultPrompt: "Why is the sky blue?"
     )
 
+    static public let granite_4_0_h_tiny_4bit_dwq = ModelConfiguration(
+        id: "mlx-community/Granite-4.0-H-Tiny-4bit-DWQ",
+        defaultPrompt: ""
+    )
+
     private static func all() -> [ModelConfiguration] {
         [
             codeLlama13b4bit,
@@ -334,6 +341,7 @@ public class LLMRegistry: AbstractModelRegistry, @unchecked Sendable {
             gemma3n_E4B_it_lm_4bit,
             gemma3n_E2B_it_lm_4bit,
             granite3_3_2b_4bit,
+            granite_4_0_h_tiny_4bit_dwq,
             llama3_1_8B_4bit,
             llama3_2_1B_4bit,
             llama3_2_3B_4bit,
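
The diff wires the model in at two levels: LLMTypeRegistry maps the "granitemoehybrid" model_type string from a checkpoint's config.json to the GraniteMoeHybridModel initializer, while LLMRegistry adds a named ModelConfiguration preset for the 4-bit DWQ checkpoint. The following is a minimal, hedged sketch (not part of the diff) of how the new preset could be loaded, assuming the LLMModelFactory.shared.loadContainer(configuration:) API that MLXLLM/MLXLMCommon expose elsewhere in this repository; the loadGranite helper name is hypothetical.

```swift
import MLXLLM
import MLXLMCommon

// Hypothetical usage sketch: load the newly registered Granite 4.0 H Tiny preset.
// Assumes LLMModelFactory.shared.loadContainer(configuration:) from MLXLLM/MLXLMCommon.
func loadGranite() async throws -> ModelContainer {
    // The preset added to LLMRegistry in this diff.
    let configuration = LLMRegistry.granite_4_0_h_tiny_4bit_dwq
    // The factory resolves the checkpoint's "granitemoehybrid" model_type via
    // LLMTypeRegistry and instantiates GraniteMoeHybridModel.
    return try await LLMModelFactory.shared.loadContainer(configuration: configuration)
}
```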