Skip to content

Commit bce030d

Browse files
authored
Revert "fix: read max_output_tokens param from config" (openai#7088)
Reverts openai#4139
1 parent f4af6e3 commit bce030d

File tree

3 files changed

+5
-16
lines changed

3 files changed

+5
-16
lines changed

codex-rs/core/src/client.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -273,7 +273,6 @@ impl ModelClient {
273273
include,
274274
prompt_cache_key: Some(self.conversation_id.to_string()),
275275
text,
276-
max_output_tokens: self.config.model_max_output_tokens,
277276
};
278277

279278
let mut payload_json = serde_json::to_value(&payload)?;

codex-rs/core/src/client_common.rs

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -280,8 +280,6 @@ pub(crate) struct ResponsesApiRequest<'a> {
280280
pub(crate) prompt_cache_key: Option<String>,
281281
#[serde(skip_serializing_if = "Option::is_none")]
282282
pub(crate) text: Option<TextControls>,
283-
#[serde(skip_serializing_if = "Option::is_none")]
284-
pub(crate) max_output_tokens: Option<i64>,
285283
}
286284

287285
pub(crate) mod tools {
@@ -465,7 +463,6 @@ mod tests {
465463
verbosity: Some(OpenAiVerbosity::Low),
466464
format: None,
467465
}),
468-
max_output_tokens: Some(10_000),
469466
};
470467

471468
let v = serde_json::to_value(&req).expect("json");
@@ -504,7 +501,6 @@ mod tests {
504501
include: vec![],
505502
prompt_cache_key: None,
506503
text: Some(text_controls),
507-
max_output_tokens: Some(10_000),
508504
};
509505

510506
let v = serde_json::to_value(&req).expect("json");
@@ -541,7 +537,6 @@ mod tests {
541537
include: vec![],
542538
prompt_cache_key: None,
543539
text: None,
544-
max_output_tokens: Some(10_000),
545540
};
546541

547542
let v = serde_json::to_value(&req).expect("json");

codex-rs/core/tests/suite/compact_resume_fork.rs

Lines changed: 5 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -282,8 +282,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
282282
"include": [
283283
"reasoning.encrypted_content"
284284
],
285-
"prompt_cache_key": prompt_cache_key,
286-
"max_output_tokens": 128000,
285+
"prompt_cache_key": prompt_cache_key
287286
});
288287
let compact_1 = json!(
289288
{
@@ -352,8 +351,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
352351
"include": [
353352
"reasoning.encrypted_content"
354353
],
355-
"prompt_cache_key": prompt_cache_key,
356-
"max_output_tokens": 128000,
354+
"prompt_cache_key": prompt_cache_key
357355
});
358356
let user_turn_2_after_compact = json!(
359357
{
@@ -413,8 +411,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
413411
"include": [
414412
"reasoning.encrypted_content"
415413
],
416-
"prompt_cache_key": prompt_cache_key,
417-
"max_output_tokens": 128000,
414+
"prompt_cache_key": prompt_cache_key
418415
});
419416
let usert_turn_3_after_resume = json!(
420417
{
@@ -494,8 +491,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
494491
"include": [
495492
"reasoning.encrypted_content"
496493
],
497-
"prompt_cache_key": prompt_cache_key,
498-
"max_output_tokens": 128000,
494+
"prompt_cache_key": prompt_cache_key
499495
});
500496
let user_turn_3_after_fork = json!(
501497
{
@@ -575,8 +571,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
575571
"include": [
576572
"reasoning.encrypted_content"
577573
],
578-
"prompt_cache_key": fork_prompt_cache_key,
579-
"max_output_tokens": 128000,
574+
"prompt_cache_key": fork_prompt_cache_key
580575
});
581576
let mut expected = json!([
582577
user_turn_1,

0 commit comments

Comments (0)