diff --git a/src/api/providers/unbound.ts b/src/api/providers/unbound.ts
index 20ec1b54bc..2a41d5416c 100644
--- a/src/api/providers/unbound.ts
+++ b/src/api/providers/unbound.ts
@@ -11,6 +11,8 @@ import { addCacheBreakpoints as addGeminiCacheBreakpoints } from "../transform/c
 import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
 import { RouterProvider } from "./router-provider"
 
+const ORIGIN_APP = "roo-code"
+
 const DEFAULT_HEADERS = {
 	"X-Unbound-Metadata": JSON.stringify({ labels: [{ key: "app", value: "roo-code" }] }),
 }
@@ -20,6 +22,20 @@ interface UnboundUsage extends OpenAI.CompletionUsage {
 	cache_read_input_tokens?: number
 }
 
+type UnboundChatCompletionCreateParamsStreaming = OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming & {
+	unbound_metadata: {
+		originApp: string
+		taskId?: string
+		mode?: string
+	}
+}
+
+type UnboundChatCompletionCreateParamsNonStreaming = OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming & {
+	unbound_metadata: {
+		originApp: string
+	}
+}
+
 export class UnboundHandler extends RouterProvider implements SingleCompletionHandler {
 	constructor(options: ApiHandlerOptions) {
 		super({
@@ -60,11 +76,16 @@ export class UnboundHandler extends RouterProvider implements SingleCompletionHa
 			maxTokens = info.maxTokens ?? undefined
 		}
 
-		const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
+		const requestOptions: UnboundChatCompletionCreateParamsStreaming = {
 			model: modelId.split("/")[1],
 			max_tokens: maxTokens,
 			messages: openAiMessages,
 			stream: true,
+			unbound_metadata: {
+				originApp: ORIGIN_APP,
+				taskId: metadata?.taskId,
+				mode: metadata?.mode,
+			},
 		}
 
 		if (this.supportsTemperature(modelId)) {
@@ -108,9 +129,12 @@ export class UnboundHandler extends RouterProvider implements SingleCompletionHa
 		const { id: modelId, info } = await this.fetchModel()
 
 		try {
-			const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
+			const requestOptions: UnboundChatCompletionCreateParamsNonStreaming = {
 				model: modelId.split("/")[1],
 				messages: [{ role: "user", content: prompt }],
+				unbound_metadata: {
+					originApp: ORIGIN_APP,
+				},
 			}
 
 			if (this.supportsTemperature(modelId)) {
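For reviewers, here is a minimal standalone sketch of the pattern the diff relies on: intersecting the OpenAI SDK's request-param types with an `unbound_metadata` field so provider-specific metadata rides along with an otherwise standard chat-completion request. The model id and metadata values below are illustrative assumptions, not part of the change.

```typescript
import OpenAI from "openai"

// Same shape as the diff's UnboundChatCompletionCreateParamsStreaming:
// the SDK's streaming request params intersected with a provider-specific field.
type UnboundStreamingParams = OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming & {
	unbound_metadata: {
		originApp: string
		taskId?: string
		mode?: string
	}
}

// Hypothetical values for illustration; in the diff the model id comes from
// modelId.split("/")[1] and taskId/mode come from the optional `metadata` argument.
const params: UnboundStreamingParams = {
	model: "claude-3-5-sonnet",
	messages: [{ role: "user", content: "Hello" }],
	stream: true,
	unbound_metadata: {
		originApp: "roo-code",
		taskId: "task-123",
		mode: "code",
	},
}

// Because UnboundStreamingParams is assignable to the base streaming params type,
// `params` can be handed to an OpenAI-compatible client unchanged; the assumption
// implied by the diff (not verified here) is that the extra unbound_metadata field
// is forwarded in the request body and consumed by the Unbound gateway.
```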