description: "Phi 3 model was created by Microsoft and is optimized for strong reasoning (especially math and logic).\n"+
84
-
"This is the smallversion of the model.",
83
+
description: "Mistral Nemo model was created by Mistral AI and was trained on large proportion of multilingual and code data, with support for function calling.\n"+
84
+
"It was trained jointly by Mistral AI and NVIDIA.\n"+
description: "Llama 2 Chat model was created by Meta and is optimized for an assistant-like chat use cases.\n"+
122
-
"This is the 13 billion parameters version of the model.",
113
+
name: "Phi 3 3.8B",
114
+
abilities: ["chat","complete","functionCalling"],
115
+
description: "Phi 3 model was created by Microsoft and is optimized for strong reasoning (especially math and logic).\n"+
116
+
"This is the smallversion of the model.",
123
117
124
118
fileOptions: [{
125
119
huggingFace: {
126
-
model: "TheBloke/Llama-2-13B-chat-GGUF",
120
+
model: "bartowski/Phi-3.1-mini-4k-instruct-GGUF",
127
121
branch: "main",
128
-
file: "llama-2-13b-chat.Q5_K_M.gguf"
122
+
file: "Phi-3.1-mini-4k-instruct-Q8_0.gguf"
129
123
}
130
124
},{
131
125
huggingFace: {
132
-
model: "TheBloke/Llama-2-13B-chat-GGUF",
126
+
model: "bartowski/Phi-3.1-mini-4k-instruct-GGUF",
133
127
branch: "main",
134
-
file: "llama-2-13b-chat.Q4_K_M.gguf"
128
+
file: "Phi-3.1-mini-4k-instruct-Q4_K_M.gguf"
135
129
}
136
130
}]
137
131
},{
138
-
name: "Llama 2 Chat 70B",
139
-
abilities: ["chat","complete"],
140
-
description: "Llama 2 Chat model was created by Meta and is optimized for an assistant-like chat use cases.\n"+
141
-
"This is the 70 billion parameters version of the model. "+
142
-
"You need a GPU with a lot of VRAM to use this version.",
132
+
name: "OLMoE 1B 7B MoE",
133
+
abilities: ["chat"],
134
+
description: "OLMoE models were created by AllenAI, and are fully open source models that utilize a Mixture of Experts architecture.\n"+
135
+
"Mixtures of Experts (MoE) is a technique where different models, each skilled in solving a particular kind of problem, work together to the improve the overall performance on complex tasks.\n"+
136
+
"This model includes 64 expert models, with a total of 7 billion parameters.\n"+
description: "Functionary models were created by Meetkai and are optimized for function calling.\n" +
239
-
"This model is based on Llama 3.\n" +
240
-
"This is the small version of the model.",
241
-
242
-
fileOptions: [{
243
-
huggingFace: {
244
-
model: "meetkai/functionary-small-v2.5-GGUF",
245
-
branch: "main",
246
-
file: "functionary-small-v2.5.f16.gguf"
247
-
}
248
-
}, {
249
-
huggingFace: {
250
-
model: "meetkai/functionary-small-v2.5-GGUF",
251
-
branch: "main",
252
-
file: "functionary-small-v2.5.Q8_0.gguf"
253
-
}
254
-
}, {
255
-
huggingFace: {
256
-
model: "meetkai/functionary-small-v2.5-GGUF",
257
-
branch: "main",
258
-
file: "functionary-small-v2.5.Q4_0.gguf"
259
-
}
260
-
}]
261
-
}, */{
262
-
name: "OLMoE 1b 7B MoE",
263
-
abilities: ["chat"],
264
-
description: "OLMoE models were created by AllenAI, and are fully open source models that utilize a Mixture of Experts architecture"+
265
-
"Mixtures of Experts (MoE) is a technique where different models, each skilled in solving a particular kind of problem, work together to the improve the overall performance on complex tasks.\n"+
266
-
"This model includes 64 expert models, with a total of 7 billion parameters.\n"+
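For reference, the new "Phi 3 3.8B" entry assembled from the "+" lines above would read roughly as follows. This is a minimal sketch: the ModelRecommendation type here is inferred only from the fields visible in this diff (name, abilities, description, fileOptions with a huggingFace source), not taken from the project's actual type definitions, and the indentation is assumed.

// A minimal sketch of the entry shape, inferred from the fields visible in this diff
// (the project's real ModelRecommendation type may differ).
type ModelRecommendation = {
    name: string,
    abilities: ("chat" | "complete" | "functionCalling")[],
    description: string,
    fileOptions: Array<{
        huggingFace: {
            model: string,
            branch: string,
            file: string
        }
    }>
};

// The new "Phi 3 3.8B" entry, assembled from the added lines above.
// Quantization options are listed from largest (Q8_0) to smallest (Q4_K_M).
const phi3Entry: ModelRecommendation = {
    name: "Phi 3 3.8B",
    abilities: ["chat", "complete", "functionCalling"],
    description: "Phi 3 model was created by Microsoft and is optimized for strong reasoning (especially math and logic).\n" +
        "This is the small version of the model.",

    fileOptions: [{
        huggingFace: {
            model: "bartowski/Phi-3.1-mini-4k-instruct-GGUF",
            branch: "main",
            file: "Phi-3.1-mini-4k-instruct-Q8_0.gguf"
        }
    }, {
        huggingFace: {
            model: "bartowski/Phi-3.1-mini-4k-instruct-GGUF",
            branch: "main",
            file: "Phi-3.1-mini-4k-instruct-Q4_K_M.gguf"
        }
    }]
};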