@@ -47,6 +47,7 @@ use crate::openai_tools::create_tools_json_for_responses_api;
 use crate::protocol::RateLimitSnapshot;
 use crate::protocol::RateLimitWindow;
 use crate::protocol::TokenUsage;
+use crate::state::TaskKind;
 use crate::token_data::PlanType;
 use crate::util::backoff;
 use codex_otel::otel_event_manager::OtelEventManager;
@@ -123,8 +124,16 @@ impl ModelClient {
     /// the provider config. Public callers always invoke `stream()` – the
     /// specialised helpers are private to avoid accidental misuse.
     pub async fn stream(&self, prompt: &Prompt) -> Result<ResponseStream> {
+        self.stream_with_task_kind(prompt, TaskKind::Regular).await
+    }
+
+    pub(crate) async fn stream_with_task_kind(
+        &self,
+        prompt: &Prompt,
+        task_kind: TaskKind,
+    ) -> Result<ResponseStream> {
         match self.provider.wire_api {
-            WireApi::Responses => self.stream_responses(prompt).await,
+            WireApi::Responses => self.stream_responses(prompt, task_kind).await,
             WireApi::Chat => {
                 // Create the raw streaming connection first.
                 let response_stream = stream_chat_completions(
@@ -165,7 +174,11 @@ impl ModelClient {
     }

     /// Implementation for the OpenAI *Responses* experimental API.
-    async fn stream_responses(&self, prompt: &Prompt) -> Result<ResponseStream> {
+    async fn stream_responses(
+        &self,
+        prompt: &Prompt,
+        task_kind: TaskKind,
+    ) -> Result<ResponseStream> {
         if let Some(path) = &*CODEX_RS_SSE_FIXTURE {
             // short circuit for tests
             warn!(path, "Streaming from fixture");
@@ -244,7 +257,7 @@ impl ModelClient {
         let max_attempts = self.provider.request_max_retries();
         for attempt in 0..=max_attempts {
             match self
-                .attempt_stream_responses(attempt, &payload_json, &auth_manager)
+                .attempt_stream_responses(attempt, &payload_json, &auth_manager, task_kind)
                 .await
             {
                 Ok(stream) => {
@@ -272,6 +285,7 @@ impl ModelClient {
         attempt: u64,
         payload_json: &Value,
         auth_manager: &Option<Arc<AuthManager>>,
+        task_kind: TaskKind,
     ) -> std::result::Result<ResponseStream, StreamAttemptError> {
         // Always fetch the latest auth in case a prior attempt refreshed the token.
         let auth = auth_manager.as_ref().and_then(|m| m.auth());
@@ -294,6 +308,7 @@ impl ModelClient {
             .header("conversation_id", self.conversation_id.to_string())
             .header("session_id", self.conversation_id.to_string())
             .header(reqwest::header::ACCEPT, "text/event-stream")
+            .header("Codex-Task-Type", task_kind.header_value())
             .json(payload_json);

         if let Some(auth) = auth.as_ref()
0 commit comments