@@ -19,6 +19,7 @@ import { googleHelper } from "./provider/google"
 import { openaiHelper } from "./provider/openai"
 import { oaCompatHelper } from "./provider/openai-compatible"
 import { createRateLimiter } from "./rateLimiter"
+import { createDataDumper } from "./dataDumper"
 
 type ZenData = Awaited<ReturnType<typeof ZenData.list>>
 type RetryOptions = {
@@ -48,16 +49,19 @@ export async function handler(
   try {
     const url = input.request.url
     const body = await input.request.json()
-    const ip = input.request.headers.get("x-real-ip") ?? ""
     const model = opts.parseModel(url, body)
     const isStream = opts.parseIsStream(url, body)
+    const ip = input.request.headers.get("x-real-ip") ?? ""
+    const sessionId = input.request.headers.get("x-opencode-session")
+    const requestId = input.request.headers.get("x-opencode-request")
     logger.metric({
       is_tream: isStream,
-      session: input.request.headers.get("x-opencode-session"),
-      request: input.request.headers.get("x-opencode-request"),
+      session: sessionId,
+      request: requestId,
     })
     const zenData = ZenData.list()
     const modelInfo = validateModel(zenData, model)
+    const dataDumper = createDataDumper(sessionId, requestId)
     const rateLimiter = createRateLimiter(modelInfo.id, modelInfo.rateLimit, ip)
     await rateLimiter?.check()
 
@@ -104,10 +108,14 @@ export async function handler(
         })
       }
 
-      return { providerInfo, authInfo, res, startTimestamp }
+      return { providerInfo, authInfo, reqBody, res, startTimestamp }
     }
 
-    const { providerInfo, authInfo, res, startTimestamp } = await retriableRequest()
+    const { providerInfo, authInfo, reqBody, res, startTimestamp } = await retriableRequest()
+
+    // Store model request
+    dataDumper?.provideModel(providerInfo.storeModel)
+    dataDumper?.provideRequest(reqBody)
 
     // Scrub response headers
     const resHeaders = new Headers()
@@ -126,6 +134,8 @@ export async function handler(
       const body = JSON.stringify(responseConverter(json))
       logger.metric({ response_length: body.length })
       logger.debug("RESPONSE: " + body)
+      dataDumper?.provideResponse(body)
+      dataDumper?.flush()
       await rateLimiter?.track()
       await trackUsage(authInfo, modelInfo, providerInfo, json.usage)
       await reload(authInfo)
@@ -155,6 +165,7 @@ export async function handler(
           response_length: responseLength,
           "timestamp.last_byte": Date.now(),
         })
+        dataDumper?.flush()
         await rateLimiter?.track()
         const usage = usageParser.retrieve()
         if (usage) {
@@ -174,6 +185,7 @@ export async function handler(
         }
         responseLength += value.length
         buffer += decoder.decode(value, { stream: true })
+        dataDumper?.provideStream(buffer)
 
         const parts = buffer.split(providerInfo.streamSeparator)
         buffer = parts.pop() ?? ""
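For context, here is a minimal sketch of what the `./dataDumper` module referenced above could look like, inferred only from the call sites in this diff (`provideModel`, `provideRequest`, `provideStream`, `provideResponse`, `flush`). The module itself is not shown in the diff, so the signatures, the nullable return value, and the flush destination are assumptions for illustration, not the actual implementation:

```ts
// Hypothetical sketch of the DataDumper contract used by the handler above.
// Only the method names come from the call sites in this diff; everything
// else (parameter types, nullable return, flush target) is assumed.
export type DataDumper = {
  provideModel: (model: string) => void
  provideRequest: (body: unknown) => void
  provideStream: (chunk: string) => void
  provideResponse: (body: string) => void
  flush: () => void
}

export function createDataDumper(
  sessionId: string | null,
  requestId: string | null,
): DataDumper | undefined {
  // The handler guards every call with `dataDumper?.`, so returning
  // undefined when the identifying headers are absent is a plausible contract.
  if (!sessionId || !requestId) return undefined

  let model: string | undefined
  let request: unknown
  let response = ""

  return {
    provideModel: (m) => {
      model = m
    },
    provideRequest: (body) => {
      request = body
    },
    // The streaming path passes the accumulated buffer on every chunk,
    // so each call simply overwrites the previous snapshot.
    provideStream: (chunk) => {
      response = chunk
    },
    provideResponse: (body) => {
      response = body
    },
    flush: () => {
      // The real persistence target is unspecified in this diff;
      // logging stands in for whatever storage the actual module uses.
      console.log(JSON.stringify({ sessionId, requestId, model, request, response }))
    },
  }
}
```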