@@ -32,6 +32,66 @@
     "systemPrompt": "",
     "chatTemplate": "{%- set loop_messages = messages %}\n{%- for message in loop_messages %}\n {%- set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' %}\n {{- content }}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}"
 },
+{
+    "order": "aa1",
+    "sha256sum": "5cd4ee65211770f1d99b4f6f4951780b9ef40e29314bd6542bb5bd0ad0bc29d1",
+    "name": "DeepSeek-R1-Distill-Qwen-7B",
+    "filename": "DeepSeek-R1-Distill-Qwen-7B-Q4_0.gguf",
+    "filesize": "4444121056",
+    "requires": "3.8.0",
+    "ramrequired": "8",
+    "parameters": "7 billion",
+    "quant": "q4_0",
+    "type": "deepseek",
+    "description": "<p>The official Qwen2.5-Math-7B distillation of DeepSeek-R1.</p><ul><li>License: <a href=\"https://opensource.org/license/mit\">MIT</a></li><li>No restrictions on commercial use</li><li>#reasoning</li></ul>",
+    "url": "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-7B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-7B-Q4_0.gguf",
+    "chatTemplate": "{%- if not add_generation_prompt is defined %}\n {%- set add_generation_prompt = false %}\n{%- endif %}\n{%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n{%- endif %}\n{%- for message in messages %}\n {%- if message['role'] == 'user' %}\n {{- '<|User|>' + message['content'] }}\n {%- endif %}\n {%- if message['role'] == 'assistant' %}\n {%- set content = message['content'] | regex_replace('^[\\\\s\\\\S]*</think>', '') %}\n {{- '<|Assistant|>' + content + '<|end▁of▁sentence|>' }}\n {%- endif %}\n{%- endfor -%}\n{%- if add_generation_prompt %}\n {{- '<|Assistant|>' }}\n{%- endif %}"
+},
+{
+    "order": "aa2",
+    "sha256sum": "906b3382f2680f4ce845459b4a122e904002b075238080307586bcffcde49eef",
+    "name": "DeepSeek-R1-Distill-Qwen-14B",
+    "filename": "DeepSeek-R1-Distill-Qwen-14B-Q4_0.gguf",
+    "filesize": "8544267680",
+    "requires": "3.8.0",
+    "ramrequired": "16",
+    "parameters": "14 billion",
+    "quant": "q4_0",
+    "type": "deepseek",
+    "description": "<p>The official Qwen2.5-14B distillation of DeepSeek-R1.</p><ul><li>License: <a href=\"https://opensource.org/license/mit\">MIT</a></li><li>No restrictions on commercial use</li><li>#reasoning</li></ul>",
+    "url": "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-14B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-14B-Q4_0.gguf",
+    "chatTemplate": "{%- if not add_generation_prompt is defined %}\n {%- set add_generation_prompt = false %}\n{%- endif %}\n{%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n{%- endif %}\n{%- for message in messages %}\n {%- if message['role'] == 'user' %}\n {{- '<|User|>' + message['content'] }}\n {%- endif %}\n {%- if message['role'] == 'assistant' %}\n {%- set content = message['content'] | regex_replace('^[\\\\s\\\\S]*</think>', '') %}\n {{- '<|Assistant|>' + content + '<|end▁of▁sentence|>' }}\n {%- endif %}\n{%- endfor -%}\n{%- if add_generation_prompt %}\n {{- '<|Assistant|>' }}\n{%- endif %}"
+},
+{
+    "order": "aa3",
+    "sha256sum": "0eb93e436ac8beec18aceb958c120d282cb2cf5451b23185e7be268fe9d375cc",
+    "name": "DeepSeek-R1-Distill-Llama-8B",
+    "filename": "DeepSeek-R1-Distill-Llama-8B-Q4_0.gguf",
+    "filesize": "4675894112",
+    "requires": "3.8.0",
+    "ramrequired": "8",
+    "parameters": "8 billion",
+    "quant": "q4_0",
+    "type": "deepseek",
+    "description": "<p>The official Llama-3.1-8B distillation of DeepSeek-R1.</p><ul><li>License: <a href=\"https://opensource.org/license/mit\">MIT</a></li><li>No restrictions on commercial use</li><li>#reasoning</li></ul>",
+    "url": "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Llama-8B-GGUF/resolve/main/DeepSeek-R1-Distill-Llama-8B-Q4_0.gguf",
+    "chatTemplate": "{%- if not add_generation_prompt is defined %}\n {%- set add_generation_prompt = false %}\n{%- endif %}\n{%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n{%- endif %}\n{%- for message in messages %}\n {%- if message['role'] == 'user' %}\n {{- '<|User|>' + message['content'] }}\n {%- endif %}\n {%- if message['role'] == 'assistant' %}\n {%- set content = message['content'] | regex_replace('^[\\\\s\\\\S]*</think>', '') %}\n {{- '<|Assistant|>' + content + '<|end▁of▁sentence|>' }}\n {%- endif %}\n{%- endfor -%}\n{%- if add_generation_prompt %}\n {{- '<|Assistant|>' }}\n{%- endif %}"
+},
+{
+    "order": "aa4",
+    "sha256sum": "b3af887d0a015b39fab2395e4faf682c1a81a6a3fd09a43f0d4292f7d94bf4d0",
+    "name": "DeepSeek-R1-Distill-Qwen-1.5B",
+    "filename": "DeepSeek-R1-Distill-Qwen-1.5B-Q4_0.gguf",
+    "filesize": "1068807776",
+    "requires": "3.8.0",
+    "ramrequired": "3",
+    "parameters": "1.5 billion",
+    "quant": "q4_0",
+    "type": "deepseek",
+    "description": "<p>The official Qwen2.5-Math-1.5B distillation of DeepSeek-R1.</p><ul><li>License: <a href=\"https://opensource.org/license/mit\">MIT</a></li><li>No restrictions on commercial use</li><li>#reasoning</li></ul>",
+    "url": "https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-1.5B-Q4_0.gguf",
+    "chatTemplate": "{%- if not add_generation_prompt is defined %}\n {%- set add_generation_prompt = false %}\n{%- endif %}\n{%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n{%- endif %}\n{%- for message in messages %}\n {%- if message['role'] == 'user' %}\n {{- '<|User|>' + message['content'] }}\n {%- endif %}\n {%- if message['role'] == 'assistant' %}\n {%- set content = message['content'] | regex_replace('^[\\\\s\\\\S]*</think>', '') %}\n {{- '<|Assistant|>' + content + '<|end▁of▁sentence|>' }}\n {%- endif %}\n{%- endfor -%}\n{%- if add_generation_prompt %}\n {{- '<|Assistant|>' }}\n{%- endif %}"
+},
 {
     "order": "b",
     "md5sum": "27b44e8ae1817525164ddf4f8dae8af4",
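
The four new entries share an identical `chatTemplate`: user and assistant turns are wrapped in `<|User|>` / `<|Assistant|>` markers, completed assistant replies are terminated with `<|end▁of▁sentence|>`, and everything up to the closing `</think>` tag is stripped from earlier assistant turns before re-prompting. Below is a minimal sketch of that behavior rendered with Python's `jinja2` package; this is an illustration only, since GPT4All uses its own template engine and `regex_replace` is not a built-in Jinja2 filter, so an equivalent is registered by hand.

```python
import re
from jinja2 import Environment

# Compact copy of the DeepSeek-R1 chatTemplate above; the whitespace-control
# tags ({%- ... -%}) make the original's newlines irrelevant to the output.
TEMPLATE = (
    r"{%- if not add_generation_prompt is defined %}{%- set add_generation_prompt = false %}{%- endif %}"
    r"{%- if messages[0]['role'] == 'system' %}{{- messages[0]['content'] }}{%- endif %}"
    r"{%- for message in messages %}"
    r"{%- if message['role'] == 'user' %}{{- '<|User|>' + message['content'] }}{%- endif %}"
    r"{%- if message['role'] == 'assistant' %}"
    r"{%- set content = message['content'] | regex_replace('^[\\s\\S]*</think>', '') %}"
    r"{{- '<|Assistant|>' + content + '<|end▁of▁sentence|>' }}"
    r"{%- endif %}{%- endfor -%}"
    r"{%- if add_generation_prompt %}{{- '<|Assistant|>' }}{%- endif %}"
)

env = Environment()
# regex_replace is provided by GPT4All's template engine; emulate it here.
env.filters["regex_replace"] = lambda s, pattern, repl: re.sub(pattern, repl, s)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
    {"role": "assistant", "content": "<think>Trivial arithmetic.</think>2 + 2 = 4."},
    {"role": "user", "content": "And doubled?"},
]

prompt = env.from_string(TEMPLATE).render(messages=messages, add_generation_prompt=True)
print(prompt)
# -> You are a helpful assistant.<|User|>What is 2 + 2?<|Assistant|>2 + 2 = 4.<|end▁of▁sentence|><|User|>And doubled?<|Assistant|>
```

Note that the `<think>...</think>` block from the earlier assistant turn does not survive into the rebuilt prompt, which keeps the context window free of prior reasoning traces.
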
@@ -472,7 +532,7 @@
     "filename": "qwen2-1_5b-instruct-q4_0.gguf",
     "filesize": "937532800",
     "requires": "3.0",
-    "ramrequired": "4",
+    "ramrequired": "3",
     "parameters": "1.5 billion",
     "quant": "q4_0",
     "type": "qwen2",
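
As a local sanity check on the metadata added or changed above, the `sha256sum` and `filesize` fields can be verified against a downloaded GGUF, and `ramrequired` (including the value lowered for qwen2-1_5b here) compared with the host's memory. A rough sketch follows; the download path is hypothetical, `psutil` is assumed to be available for the RAM comparison, and `ramrequired` is treated as a coarse minimum in GB.

```python
import hashlib
import os

import psutil  # assumption: used only for the RAM comparison

# Values copied from the DeepSeek-R1-Distill-Qwen-7B entry above.
entry = {
    "filename": "DeepSeek-R1-Distill-Qwen-7B-Q4_0.gguf",
    "sha256sum": "5cd4ee65211770f1d99b4f6f4951780b9ef40e29314bd6542bb5bd0ad0bc29d1",
    "filesize": "4444121056",
    "ramrequired": "8",
}

path = os.path.expanduser(f"~/models/{entry['filename']}")  # hypothetical location

# filesize is the exact byte count of the .gguf file.
assert os.path.getsize(path) == int(entry["filesize"]), "size mismatch"

# Stream the file through SHA-256 rather than loading ~4 GiB into memory.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
assert digest.hexdigest() == entry["sha256sum"], "checksum mismatch"

# Warn when the machine has less memory than the entry suggests.
total_gb = psutil.virtual_memory().total / 10**9
if total_gb < int(entry["ramrequired"]):
    print(f"warning: entry suggests {entry['ramrequired']} GB RAM, host has {total_gb:.1f} GB")
else:
    print("size, checksum, and RAM requirement all check out")
```
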