
Commit c56690c

feat(sdk): Sync with latest OpenAPI specification

- Add OllamaCloud and Mistral providers to Provider enum
- Update Display and TryFrom implementations for new providers
- Add Serialize derive to ChatCompletionMessageToolCallChunk
- Add logprobs field to ChatCompletionChoice
- Add reasoning_format field to request/response types
- Update all tests to include new providers and logprobs field

Co-authored-by: Eden Reich <edenreich@users.noreply.github.com>

🤖 Generated with [Claude Code](https://claude.ai/code)

1 parent 99cd978 commit c56690c

File tree

1 file changed: +32 −1 lines changed

src/lib.rs

Lines changed: 32 additions & 1 deletion
```diff
@@ -172,6 +172,8 @@ pub struct ListAgentsResponse {
 pub enum Provider {
     #[serde(alias = "Ollama", alias = "OLLAMA")]
     Ollama,
+    #[serde(alias = "OllamaCloud", alias = "OLLAMA_CLOUD", rename = "ollama_cloud")]
+    OllamaCloud,
     #[serde(alias = "Groq", alias = "GROQ")]
     Groq,
     #[serde(alias = "OpenAI", alias = "OPENAI")]
```
```diff
@@ -186,19 +188,23 @@ pub enum Provider {
     Deepseek,
     #[serde(alias = "Google", alias = "GOOGLE")]
     Google,
+    #[serde(alias = "Mistral", alias = "MISTRAL")]
+    Mistral,
 }
 
 impl fmt::Display for Provider {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
             Provider::Ollama => write!(f, "ollama"),
+            Provider::OllamaCloud => write!(f, "ollama_cloud"),
             Provider::Groq => write!(f, "groq"),
             Provider::OpenAI => write!(f, "openai"),
             Provider::Cloudflare => write!(f, "cloudflare"),
             Provider::Cohere => write!(f, "cohere"),
             Provider::Anthropic => write!(f, "anthropic"),
             Provider::Deepseek => write!(f, "deepseek"),
             Provider::Google => write!(f, "google"),
+            Provider::Mistral => write!(f, "mistral"),
         }
     }
 }
```
```diff
@@ -209,13 +215,15 @@ impl TryFrom<&str> for Provider {
     fn try_from(s: &str) -> Result<Self, Self::Error> {
         match s.to_lowercase().as_str() {
             "ollama" => Ok(Self::Ollama),
+            "ollama_cloud" => Ok(Self::OllamaCloud),
             "groq" => Ok(Self::Groq),
             "openai" => Ok(Self::OpenAI),
             "cloudflare" => Ok(Self::Cloudflare),
             "cohere" => Ok(Self::Cohere),
             "anthropic" => Ok(Self::Anthropic),
             "deepseek" => Ok(Self::Deepseek),
             "google" => Ok(Self::Google),
+            "mistral" => Ok(Self::Mistral),
             _ => Err(GatewayError::BadRequest(format!("Unknown provider: {s}"))),
         }
     }
```
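For orientation, here is a minimal sketch of how the two new variants behave through `Display` and `TryFrom<&str>` (the crate name `inference_gateway_sdk` is an assumption; the behavior itself mirrors the diff above):

```rust
// Assumes the SDK crate is importable as `inference_gateway_sdk` and
// re-exports `Provider` (the crate name is an assumption, not confirmed here).
use inference_gateway_sdk::Provider;

fn main() {
    // Display emits the canonical lowercase wire names from the match above.
    assert_eq!(Provider::OllamaCloud.to_string(), "ollama_cloud");
    assert_eq!(Provider::Mistral.to_string(), "mistral");

    // try_from lowercases its input first, so mixed-case spellings parse too.
    assert!(matches!(
        Provider::try_from("OLLAMA_CLOUD"),
        Ok(Provider::OllamaCloud)
    ));
    assert!(matches!(Provider::try_from("Mistral"), Ok(Provider::Mistral)));
}
```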
```diff
@@ -336,10 +344,13 @@ struct CreateChatCompletionRequest {
     /// Maximum number of tokens to generate
     #[serde(skip_serializing_if = "Option::is_none")]
     max_tokens: Option<i32>,
+    /// The format of the reasoning content. Can be `raw` or `parsed`.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    reasoning_format: Option<String>,
 }
 
 /// A tool call chunk in streaming responses
-#[derive(Debug, Deserialize, Clone)]
+#[derive(Debug, Serialize, Deserialize, Clone)]
 pub struct ChatCompletionMessageToolCallChunk {
     /// Index of the tool call in the array
     pub index: i32,
```
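Since `CreateChatCompletionRequest` is crate-private, here is a self-contained stand-in illustrating what `skip_serializing_if = "Option::is_none"` means for the new field: a `None` leaves the payload unchanged for existing gateways. The struct below is a sketch, not the SDK type:

```rust
use serde::Serialize;

// Stand-in mirroring the relevant fields of the crate-private request type.
#[derive(Serialize)]
struct RequestSketch {
    model: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    reasoning_format: Option<String>,
}

fn main() -> serde_json::Result<()> {
    // None is skipped entirely, so the serialized payload is unchanged.
    let without = RequestSketch { model: "m".into(), reasoning_format: None };
    assert_eq!(serde_json::to_string(&without)?, r#"{"model":"m"}"#);

    // Some("parsed") serializes as a plain string field.
    let with = RequestSketch {
        model: "m".into(),
        reasoning_format: Some("parsed".into()),
    };
    assert_eq!(
        serde_json::to_string(&with)?,
        r#"{"model":"m","reasoning_format":"parsed"}"#
    );
    Ok(())
}
```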
```diff
@@ -375,6 +386,8 @@ pub struct ChatCompletionChoice {
     pub finish_reason: FinishReason,
     pub message: Message,
     pub index: i32,
+    /// Log probability information for the choice
+    pub logprobs: Option<ChoiceLogprobs>,
 }
 
 /// The response from generating content
```
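The new `logprobs` field is a plain `Option`, which serde treats leniently on input. A stand-in sketch (`ChoiceLogprobs` simplified to an assumed shape, since its fields are not shown in this diff) of why the fixtures' `"logprobs": null` and providers that omit the key both deserialize to `None`:

```rust
use serde::Deserialize;

// Simplified stand-in for ChoiceLogprobs; the real type's fields are not
// visible in this diff, so this shape is assumed for illustration.
#[derive(Debug, Deserialize)]
struct LogprobsSketch {
    tokens: Vec<String>,
}

#[derive(Debug, Deserialize)]
struct ChoiceSketch {
    index: i32,
    logprobs: Option<LogprobsSketch>,
}

fn main() -> serde_json::Result<()> {
    // An explicit null (as in the updated test fixtures) maps to None...
    let with_null: ChoiceSketch =
        serde_json::from_str(r#"{"index":0,"logprobs":null}"#)?;
    assert!(with_null.logprobs.is_none());

    // ...and serde's default Option handling also accepts a missing key,
    // so providers that never send logprobs keep deserializing cleanly.
    let absent: ChoiceSketch = serde_json::from_str(r#"{"index":0}"#)?;
    assert!(absent.logprobs.is_none());
    Ok(())
}
```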
```diff
@@ -406,6 +419,9 @@ pub struct CreateChatCompletionStreamResponse {
     /// Usage statistics for the completion request.
     #[serde(skip_serializing_if = "Option::is_none")]
     pub usage: Option<CompletionUsage>,
+    /// The format of the reasoning content. Can be `raw` or `parsed`.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub reasoning_format: Option<String>,
 }
 
 /// Token log probability information
```
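Per the doc comment, `reasoning_format` carries the values `raw` or `parsed`. A small self-contained sketch of how a consumer of the stream response might branch on it; the helper function is hypothetical, not SDK API:

```rust
// Hypothetical helper: only the `raw`/`parsed` values come from the diff's
// doc comment, the rest is illustrative.
fn describe_reasoning(reasoning_format: Option<&str>) -> &'static str {
    match reasoning_format {
        Some("raw") => "reasoning left inline in the message content",
        Some("parsed") => "reasoning split out into a separate field",
        Some(_) => "unrecognized reasoning format",
        None => "no reasoning metadata on this chunk",
    }
}

fn main() {
    assert_eq!(
        describe_reasoning(Some("parsed")),
        "reasoning split out into a separate field"
    );
    assert_eq!(describe_reasoning(None), "no reasoning metadata on this chunk");
}
```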
```diff
@@ -783,6 +799,7 @@ impl InferenceGatewayAPI for InferenceGatewayClient {
             stream: false,
             tools: self.tools.clone(),
             max_tokens: self.max_tokens,
+            reasoning_format: None,
         };
 
         let response = request.json(&request_payload).send().await?;
```
```diff
@@ -828,6 +845,7 @@ impl InferenceGatewayAPI for InferenceGatewayClient {
             stream: true,
             tools: None,
             max_tokens: None,
+            reasoning_format: None,
         };
 
         async_stream::try_stream! {
```
```diff
@@ -989,13 +1007,15 @@ mod tests {
     fn test_provider_serialization() {
         let providers = vec![
             (Provider::Ollama, "ollama"),
+            (Provider::OllamaCloud, "ollama_cloud"),
             (Provider::Groq, "groq"),
             (Provider::OpenAI, "openai"),
             (Provider::Cloudflare, "cloudflare"),
             (Provider::Cohere, "cohere"),
             (Provider::Anthropic, "anthropic"),
             (Provider::Deepseek, "deepseek"),
             (Provider::Google, "google"),
+            (Provider::Mistral, "mistral"),
         ];
 
         for (provider, expected) in providers {
```
```diff
@@ -1008,13 +1028,15 @@ mod tests {
     fn test_provider_deserialization() {
         let test_cases = vec![
             ("\"ollama\"", Provider::Ollama),
+            ("\"ollama_cloud\"", Provider::OllamaCloud),
             ("\"groq\"", Provider::Groq),
             ("\"openai\"", Provider::OpenAI),
             ("\"cloudflare\"", Provider::Cloudflare),
             ("\"cohere\"", Provider::Cohere),
             ("\"anthropic\"", Provider::Anthropic),
             ("\"deepseek\"", Provider::Deepseek),
             ("\"google\"", Provider::Google),
+            ("\"mistral\"", Provider::Mistral),
         ];
 
         for (json, expected) in test_cases {
```
```diff
@@ -1062,13 +1084,15 @@ mod tests {
     fn test_provider_display() {
         let providers = vec![
             (Provider::Ollama, "ollama"),
+            (Provider::OllamaCloud, "ollama_cloud"),
             (Provider::Groq, "groq"),
             (Provider::OpenAI, "openai"),
             (Provider::Cloudflare, "cloudflare"),
             (Provider::Cohere, "cohere"),
             (Provider::Anthropic, "anthropic"),
             (Provider::Deepseek, "deepseek"),
             (Provider::Google, "google"),
+            (Provider::Mistral, "mistral"),
         ];
 
         for (provider, expected) in providers {
```
```diff
@@ -1329,6 +1353,7 @@ mod tests {
             {
                 "index": 0,
                 "finish_reason": "stop",
+                "logprobs": null,
                 "message": {
                     "role": "assistant",
                     "content": "Hellloooo"
```
```diff
@@ -1376,6 +1401,7 @@ mod tests {
             {
                 "index": 0,
                 "finish_reason": "stop",
+                "logprobs": null,
                 "message": {
                     "role": "assistant",
                     "content": "Hello"
```
```diff
@@ -1517,6 +1543,7 @@ mod tests {
             {
                 "index": 0,
                 "finish_reason": "stop",
+                "logprobs": null,
                 "message": {
                     "role": "assistant",
                     "content": "Hello"
```
```diff
@@ -1671,6 +1698,7 @@ mod tests {
             {
                 "index": 0,
                 "finish_reason": "tool_calls",
+                "logprobs": null,
                 "message": {
                     "role": "assistant",
                     "content": "Let me check the weather for you.",
```
```diff
@@ -1760,6 +1788,7 @@ mod tests {
             {
                 "index": 0,
                 "finish_reason": "stop",
+                "logprobs": null,
                 "message": {
                     "role": "assistant",
                     "content": "Hello!"
```
```diff
@@ -1844,6 +1873,7 @@ mod tests {
             {
                 "index": 0,
                 "finish_reason": "stop",
+                "logprobs": null,
                 "message": {
                     "role": "assistant",
                     "content": "Let me check the weather for you",
```
```diff
@@ -1941,6 +1971,7 @@ mod tests {
             {
                 "index": 0,
                 "finish_reason": "stop",
+                "logprobs": null,
                 "message": {
                     "role": "assistant",
                     "content": "Here's a poem with 100 tokens..."
```
