Skip to content

Commit 88a6200

Browse files
committed
feat: v1.3 - OpenAI tool streaming, Responses API function calls, thinking config, CORS, and more
Add OpenAI streaming tool call support with ContentBlockStart/Delta/Stop handling to emit ChunkToolCall events with incremental argument streaming. Add Responses API function_call and function_call_output input items with conversion to Anthropic ToolUse/ToolResult blocks, plus streaming events for FunctionCallArgumentsDelta and FunctionCallArgumentsDone. Add client-provided thinking configuration (ThinkingConfig with budget_tokens) respected in Anthropic-to-Google conversion. Add OpenAI response_format/JSON mode with system prompt injection, tool_choice conversion (auto/required/none/specific function), and image URL fallback for non-data URLs. Extend cache key to include max_tokens, top_p, top_k, stop_sequences for correct cache discrimination. Add CORS preflight handler and headers on all response types. Add 300s streaming frame timeout on all streaming loops. Replace hardcoded user agent with dynamic CARGO_PKG_VERSION. Replace timestamp-based request IDs with cryptographic random IDs.
1 parent 2b1fe5d commit 88a6200

File tree

10 files changed: +483 additions, −74 deletions

src/auth/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,7 @@ impl HttpClient {
106106
) -> Result<Vec<u8>, String> {
107107
let os = std::env::consts::OS;
108108
let arch = std::env::consts::ARCH;
109-
let user_agent = format!("antigravity/1.15.8 {}/{}", os, arch);
109+
let user_agent = format!("antigravity/{} {}/{}", env!("CARGO_PKG_VERSION"), os, arch);
110110

111111
let client_metadata = r#"{"ideType":"IDE_UNSPECIFIED","platform":"PLATFORM_UNSPECIFIED","pluginType":"GEMINI"}"#;
112112

@@ -151,7 +151,7 @@ impl HttpClient {
151151
) -> Result<Vec<u8>, String> {
152152
let os = std::env::consts::OS;
153153
let arch = std::env::consts::ARCH;
154-
let user_agent = format!("antigravity/1.15.8 {}/{}", os, arch);
154+
let user_agent = format!("antigravity/{} {}/{}", env!("CARGO_PKG_VERSION"), os, arch);
155155

156156
let client_metadata = r#"{"ideType":"IDE_UNSPECIFIED","platform":"PLATFORM_UNSPECIFIED","pluginType":"GEMINI"}"#;
157157

src/cache.rs

Lines changed: 70 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -64,13 +64,19 @@ impl ResponseCache {
6464
/// Generate a cache key from request parameters using SHA-256.
6565
///
6666
/// The key is a deterministic hash of the model, messages, system prompt,
67-
/// tools, and temperature. Returns a hex-encoded string (64 chars).
67+
/// tools, temperature, max_tokens, top_p, top_k, and stop_sequences.
68+
/// Returns a hex-encoded string (64 chars).
69+
#[allow(clippy::too_many_arguments)]
6870
pub fn make_key(
6971
model: &str,
7072
messages_json: &str,
7173
system_json: Option<&str>,
7274
tools_json: Option<&str>,
7375
temperature: Option<f32>,
76+
max_tokens: u32,
77+
top_p: Option<f32>,
78+
top_k: Option<u32>,
79+
stop_sequences: Option<&str>,
7480
) -> String {
7581
let mut hasher = Sha256::new();
7682
hasher.update(model.as_bytes());
@@ -88,6 +94,20 @@ impl ResponseCache {
8894
if let Some(temp) = temperature {
8995
hasher.update(temp.to_le_bytes());
9096
}
97+
hasher.update(b"|");
98+
hasher.update(max_tokens.to_le_bytes());
99+
hasher.update(b"|");
100+
if let Some(tp) = top_p {
101+
hasher.update(tp.to_le_bytes());
102+
}
103+
hasher.update(b"|");
104+
if let Some(tk) = top_k {
105+
hasher.update(tk.to_le_bytes());
106+
}
107+
hasher.update(b"|");
108+
if let Some(stop) = stop_sequences {
109+
hasher.update(stop.as_bytes());
110+
}
91111
let result = hasher.finalize();
92112
// Use a pre-allocated string and write hex directly (avoids per-byte format!)
93113
let mut hex = String::with_capacity(64);
@@ -211,7 +231,17 @@ mod tests {
211231
fn test_cache_basic_operations() {
212232
let mut cache = ResponseCache::new(true, 3600, 100);
213233

214-
let key = ResponseCache::make_key("claude-3", r#"[{"role":"user"}]"#, None, None, None);
234+
let key = ResponseCache::make_key(
235+
"claude-3",
236+
r#"[{"role":"user"}]"#,
237+
None,
238+
None,
239+
None,
240+
1024,
241+
None,
242+
None,
243+
None,
244+
);
215245
let response = b"test response".to_vec();
216246

217247
// Initially empty
@@ -276,13 +306,21 @@ mod tests {
276306
Some("system prompt"),
277307
None,
278308
Some(0.7),
309+
1024,
310+
None,
311+
None,
312+
None,
279313
);
280314
let key2 = ResponseCache::make_key(
281315
"claude-3",
282316
r#"[{"role":"user","content":"hello"}]"#,
283317
Some("system prompt"),
284318
None,
285319
Some(0.7),
320+
1024,
321+
None,
322+
None,
323+
None,
286324
);
287325
assert_eq!(key1, key2);
288326

@@ -293,6 +331,10 @@ mod tests {
293331
Some("system prompt"),
294332
None,
295333
Some(0.7),
334+
1024,
335+
None,
336+
None,
337+
None,
296338
);
297339
assert_ne!(key1, key3);
298340

@@ -303,6 +345,10 @@ mod tests {
303345
Some("system prompt"),
304346
None,
305347
Some(0.7),
348+
1024,
349+
None,
350+
None,
351+
None,
306352
);
307353
assert_ne!(key1, key4);
308354

@@ -313,6 +359,10 @@ mod tests {
313359
Some("system prompt"),
314360
None,
315361
Some(0.9),
362+
1024,
363+
None,
364+
None,
365+
None,
316366
);
317367
assert_ne!(key1, key5);
318368

@@ -323,9 +373,27 @@ mod tests {
323373
Some("different system"),
324374
None,
325375
Some(0.7),
376+
1024,
377+
None,
378+
None,
379+
None,
326380
);
327381
assert_ne!(key1, key6);
328382

383+
// Different max_tokens should produce different key
384+
let key7 = ResponseCache::make_key(
385+
"claude-3",
386+
r#"[{"role":"user","content":"hello"}]"#,
387+
Some("system prompt"),
388+
None,
389+
Some(0.7),
390+
2048,
391+
None,
392+
None,
393+
None,
394+
);
395+
assert_ne!(key1, key7);
396+
329397
// Key should be valid hex (64 chars for SHA-256)
330398
assert_eq!(key1.len(), 64);
331399
assert!(key1.chars().all(|c| c.is_ascii_hexdigit()));

src/cloudcode/request.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ use std::borrow::Cow;
33
use std::sync::LazyLock;
44

55
use crate::format::google::CloudCodeRequest;
6-
use crate::format::{MessagesRequest, convert_request};
6+
use crate::format::{convert_request, MessagesRequest};
77
use crate::models::{get_model_family, is_thinking_model};
88

99
const SYSTEM_INSTRUCTION: &str = "You are Antigravity, a powerful agentic AI coding assistant designed by the Google Deepmind team working on Advanced Agentic Coding.You are pair programming with a USER to solve their coding task. The task may require creating a new codebase, modifying or debugging an existing codebase, or simply answering a question.**Absolute paths only****Proactiveness**";
@@ -21,7 +21,7 @@ static SYSTEM_INSTRUCTION_IGNORE: LazyLock<String> = LazyLock::new(|| {
2121
static USER_AGENT: LazyLock<String> = LazyLock::new(|| {
2222
let os = std::env::consts::OS;
2323
let arch = std::env::consts::ARCH;
24-
format!("antigravity/1.15.8 {}/{}", os, arch)
24+
format!("antigravity/{} {}/{}", env!("CARGO_PKG_VERSION"), os, arch)
2525
});
2626

2727
pub fn build_headers(

src/format/anthropic.rs

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,19 @@ pub struct MessagesRequest {
2121
pub stop_sequences: Option<Vec<String>>,
2222
#[serde(skip_serializing_if = "Option::is_none")]
2323
pub tool_choice: Option<ToolChoice>,
24+
#[serde(skip_serializing_if = "Option::is_none")]
25+
pub thinking: Option<ThinkingConfig>,
26+
}
27+
28+
/// Client-provided thinking configuration.
29+
#[derive(Debug, Clone, Serialize, Deserialize)]
30+
#[serde(tag = "type", rename_all = "snake_case")]
31+
pub enum ThinkingConfig {
32+
Enabled {
33+
#[serde(skip_serializing_if = "Option::is_none")]
34+
budget_tokens: Option<u32>,
35+
},
36+
Disabled,
2437
}
2538

2639
#[derive(Debug, Clone, Serialize, Deserialize)]

src/format/openai.rs

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,8 @@ pub struct ChatCompletionRequest {
2525
pub n: Option<u32>,
2626
#[serde(skip_serializing_if = "Option::is_none")]
2727
pub user: Option<String>,
28+
#[serde(skip_serializing_if = "Option::is_none")]
29+
pub response_format: Option<ResponseFormat>,
2830
}
2931

3032
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -34,6 +36,16 @@ pub enum StopSequence {
3436
Multiple(Vec<String>),
3537
}
3638

39+
/// Response format specification for JSON mode.
40+
#[derive(Debug, Clone, Serialize, Deserialize)]
41+
pub struct ResponseFormat {
42+
#[serde(rename = "type")]
43+
pub format_type: String,
44+
/// JSON schema (for structured output mode)
45+
#[serde(skip_serializing_if = "Option::is_none")]
46+
pub json_schema: Option<serde_json::Value>,
47+
}
48+
3749
#[derive(Debug, Clone, Serialize, Deserialize)]
3850
pub struct ChatMessage {
3951
pub role: String,

src/format/openai_convert.rs

Lines changed: 56 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -134,6 +134,48 @@ pub fn openai_to_anthropic(request: &ChatCompletionRequest) -> MessagesRequest {
134134
.collect()
135135
});
136136

137+
// Convert tool_choice
138+
let tool_choice = request.tool_choice.as_ref().and_then(|tc| {
139+
match tc {
140+
serde_json::Value::String(s) => match s.as_str() {
141+
"auto" => Some(crate::format::anthropic::ToolChoice::Auto),
142+
"required" | "any" => Some(crate::format::anthropic::ToolChoice::Any),
143+
"none" => None, // No tool choice = don't use tools
144+
_ => None,
145+
},
146+
serde_json::Value::Object(obj) => {
147+
// {"type": "function", "function": {"name": "..."}}
148+
if let Some(func) = obj.get("function")
149+
&& let Some(name) = func.get("name").and_then(|n| n.as_str()) {
150+
return Some(crate::format::anthropic::ToolChoice::Tool {
151+
name: name.to_string(),
152+
});
153+
}
154+
None
155+
}
156+
_ => None,
157+
}
158+
});
159+
160+
// Handle response_format: inject JSON instruction into system prompt if JSON mode requested
161+
let system = if let Some(ref fmt) = request.response_format {
162+
if fmt.format_type == "json_object" {
163+
let json_instruction =
164+
"You must respond with valid JSON. Output only JSON, no other text.";
165+
match system {
166+
Some(SystemPrompt::Text(existing)) => {
167+
Some(SystemPrompt::Text(format!("{}\n\n{}", existing, json_instruction)))
168+
}
169+
None => Some(SystemPrompt::Text(json_instruction.to_string())),
170+
other => other,
171+
}
172+
} else {
173+
system
174+
}
175+
} else {
176+
system
177+
};
178+
137179
MessagesRequest {
138180
model: request.model.clone(),
139181
messages,
@@ -145,7 +187,8 @@ pub fn openai_to_anthropic(request: &ChatCompletionRequest) -> MessagesRequest {
145187
stop_sequences,
146188
stream: request.stream,
147189
tools,
148-
tool_choice: None,
190+
tool_choice,
191+
thinking: None,
149192
}
150193
}
151194

@@ -246,25 +289,30 @@ fn convert_chat_content(content: &ChatContent) -> MessageContent {
246289
ChatContent::Parts(parts) => {
247290
let blocks: Vec<ContentBlock> = parts
248291
.iter()
249-
.filter_map(|p| match p {
292+
.map(|p| match p {
250293
crate::format::openai::ChatContentPart::Text { text } => {
251-
Some(ContentBlock::Text {
294+
ContentBlock::Text {
252295
text: text.clone(),
253296
cache_control: None,
254-
})
297+
}
255298
}
256299
crate::format::openai::ChatContentPart::ImageUrl { image_url } => {
257300
// Try to parse data URL
258301
if let Some(data) = parse_data_url(&image_url.url) {
259-
Some(ContentBlock::Image {
302+
ContentBlock::Image {
260303
source: crate::format::anthropic::ImageSource {
261304
source_type: "base64".to_string(),
262305
media_type: data.0,
263306
data: data.1,
264307
},
265-
})
308+
}
266309
} else {
267-
None
310+
// Non-data URL: include as text reference since
311+
// the upstream API does not support URL-based images
312+
ContentBlock::Text {
313+
text: format!("[Image: {}]", image_url.url),
314+
cache_control: None,
315+
}
268316
}
269317
}
270318
})
@@ -324,6 +372,7 @@ mod tests {
324372
tool_choice: None,
325373
n: None,
326374
user: None,
375+
response_format: None,
327376
};
328377

329378
let anthropic = openai_to_anthropic(&request);

src/format/responses.rs

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -57,6 +57,18 @@ pub enum ResponseInputItem {
5757
role: String,
5858
content: ResponseInputContent,
5959
},
60+
#[serde(rename = "function_call")]
61+
FunctionCall {
62+
id: Option<String>,
63+
call_id: Option<String>,
64+
name: Option<String>,
65+
arguments: Option<String>,
66+
},
67+
#[serde(rename = "function_call_output")]
68+
FunctionCallOutput {
69+
call_id: Option<String>,
70+
output: Option<String>,
71+
},
6072
#[serde(other)]
6173
Other,
6274
}
@@ -257,6 +269,15 @@ pub enum ResponseStreamEvent {
257269
text: String,
258270
},
259271

272+
#[serde(rename = "response.function_call_arguments.delta")]
273+
FunctionCallArgumentsDelta { output_index: usize, delta: String },
274+
275+
#[serde(rename = "response.function_call_arguments.done")]
276+
FunctionCallArgumentsDone {
277+
output_index: usize,
278+
arguments: String,
279+
},
280+
260281
#[serde(rename = "response.completed")]
261282
ResponseCompleted { response: Box<ResponsesResponse> },
262283
}

0 commit comments

Comments (0)