diff --git a/config.yaml.example b/config.yaml.example index aecc68a..292e3d4 100644 --- a/config.yaml.example +++ b/config.yaml.example @@ -32,11 +32,31 @@ providers: openai: # API key for OpenAI # Can also be set via OPENAI_API_KEY environment variable - # api_key: "..." + # api_key: "sk-..." # Base URL for OpenAI API (can be changed for custom endpoints) # Can also be set via OPENAI_BASE_URL environment variable # base_url: "https://api.openai.com" + + # Google Gemini configuration + gemini: + # API key for Google Gemini + # Can also be set via GEMINI_API_KEY environment variable + # api_key: "AIza..." + + # Base URL for Gemini API + # Can also be set via GEMINI_BASE_URL environment variable + # base_url: "https://generativelanguage.googleapis.com" + + # OpenRouter configuration + openrouter: + # API key for OpenRouter + # Can also be set via OPENROUTER_API_KEY environment variable + # api_key: "sk-or-v1-..." + + # Base URL for OpenRouter API + # Can also be set via OPENROUTER_BASE_URL environment variable + # base_url: "https://openrouter.ai/api" # Storage configuration storage: @@ -56,14 +76,17 @@ subagents: # Maps subagent types to specific models # Only used when enable: true mappings: - # Code review specialist (example) + # Code review specialist - routes to OpenAI (example) # code-reviewer: "gpt-4o" - # Data analysis expert (example) - # data-analyst: "o3" + # Data analysis expert - routes to Gemini (example) + # data-analyst: "gemini-1.5-pro" - # Documentation writer (example) - # doc-writer: "gpt-3.5-turbo" + # Documentation writer - routes to OpenRouter (example) + # doc-writer: "openrouter/claude-3-haiku-20240307" + + # Deep reasoning specialist - routes to OpenRouter (example) + # deep-reasoning: "openrouter/o3-mini" # Environment variable overrides: # The following environment variables will override the YAML configuration: @@ -83,6 +106,14 @@ subagents: # OPENAI_API_KEY - OpenAI API key # OPENAI_BASE_URL - OpenAI base URL # +# Gemini: +# 
GEMINI_API_KEY - Google Gemini API key +# GEMINI_BASE_URL - Gemini base URL +# +# OpenRouter: +# OPENROUTER_API_KEY - OpenRouter API key +# OPENROUTER_BASE_URL - OpenRouter base URL +# # Storage: # DB_PATH - Database file path # diff --git a/proxy/cmd/proxy/main.go b/proxy/cmd/proxy/main.go index 8623203..9043caf 100644 --- a/proxy/cmd/proxy/main.go +++ b/proxy/cmd/proxy/main.go @@ -31,6 +31,8 @@ func main() { providers := make(map[string]provider.Provider) providers["anthropic"] = provider.NewAnthropicProvider(&cfg.Providers.Anthropic) providers["openai"] = provider.NewOpenAIProvider(&cfg.Providers.OpenAI) + providers["gemini"] = provider.NewGeminiProvider(&cfg.Providers.Gemini) + providers["openrouter"] = provider.NewOpenRouterProvider(&cfg.Providers.OpenRouter) // Initialize model router modelRouter := service.NewModelRouter(cfg, providers, logger) diff --git a/proxy/internal/config/config.go b/proxy/internal/config/config.go index eb3e6bb..e63b84f 100644 --- a/proxy/internal/config/config.go +++ b/proxy/internal/config/config.go @@ -34,8 +34,10 @@ type TimeoutsConfig struct { } type ProvidersConfig struct { - Anthropic AnthropicProviderConfig `yaml:"anthropic"` - OpenAI OpenAIProviderConfig `yaml:"openai"` + Anthropic AnthropicProviderConfig `yaml:"anthropic"` + OpenAI OpenAIProviderConfig `yaml:"openai"` + Gemini GeminiProviderConfig `yaml:"gemini"` + OpenRouter OpenRouterProviderConfig `yaml:"openrouter"` } type AnthropicProviderConfig struct { @@ -49,6 +51,16 @@ type OpenAIProviderConfig struct { APIKey string `yaml:"api_key"` } +type GeminiProviderConfig struct { + BaseURL string `yaml:"base_url"` + APIKey string `yaml:"api_key"` +} + +type OpenRouterProviderConfig struct { + BaseURL string `yaml:"base_url"` + APIKey string `yaml:"api_key"` +} + type AnthropicConfig struct { BaseURL string Version string @@ -95,6 +107,14 @@ func Load() (*Config, error) { BaseURL: "https://api.openai.com", APIKey: "", }, + Gemini: GeminiProviderConfig{ + BaseURL: 
"https://generativelanguage.googleapis.com", + APIKey: "", + }, + OpenRouter: OpenRouterProviderConfig{ + BaseURL: "https://openrouter.ai/api", + APIKey: "", + }, }, Storage: StorageConfig{ DBPath: "requests.db", @@ -154,6 +174,22 @@ func Load() (*Config, error) { if envKey := os.Getenv("OPENAI_API_KEY"); envKey != "" { cfg.Providers.OpenAI.APIKey = envKey } + + // Override Gemini settings + if envURL := os.Getenv("GEMINI_BASE_URL"); envURL != "" { + cfg.Providers.Gemini.BaseURL = envURL + } + if envKey := os.Getenv("GEMINI_API_KEY"); envKey != "" { + cfg.Providers.Gemini.APIKey = envKey + } + + // Override OpenRouter settings + if envURL := os.Getenv("OPENROUTER_BASE_URL"); envURL != "" { + cfg.Providers.OpenRouter.BaseURL = envURL + } + if envKey := os.Getenv("OPENROUTER_API_KEY"); envKey != "" { + cfg.Providers.OpenRouter.APIKey = envKey + } // Override storage settings if envPath := os.Getenv("DB_PATH"); envPath != "" { diff --git a/proxy/internal/provider/gemini.go b/proxy/internal/provider/gemini.go new file mode 100644 index 0000000..88543be --- /dev/null +++ b/proxy/internal/provider/gemini.go @@ -0,0 +1,351 @@ +package provider + +import ( + "bufio" + "bytes" + "compress/gzip" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" + + "github.com/seifghazi/claude-code-monitor/internal/config" + "github.com/seifghazi/claude-code-monitor/internal/model" +) + +type GeminiProvider struct { + client *http.Client + config *config.GeminiProviderConfig +} + +func NewGeminiProvider(cfg *config.GeminiProviderConfig) Provider { + return &GeminiProvider{ + client: &http.Client{ + Timeout: 300 * time.Second, // 5 minutes timeout + }, + config: cfg, + } +} + +func (p *GeminiProvider) Name() string { + return "gemini" +} + +func (p *GeminiProvider) ForwardRequest(ctx context.Context, originalReq *http.Request) (*http.Response, error) { + // First, we need to convert the Anthropic request to Gemini format + bodyBytes, err := 
io.ReadAll(originalReq.Body) + if err != nil { + return nil, fmt.Errorf("failed to read request body: %w", err) + } + originalReq.Body = io.NopCloser(bytes.NewReader(bodyBytes)) + + var anthropicReq model.AnthropicRequest + if err := json.Unmarshal(bodyBytes, &anthropicReq); err != nil { + return nil, fmt.Errorf("failed to parse anthropic request: %w", err) + } + + // Convert to Gemini format + geminiReq := convertAnthropicToGemini(&anthropicReq) + newBodyBytes, err := json.Marshal(geminiReq) + if err != nil { + return nil, fmt.Errorf("failed to marshal gemini request: %w", err) + } + + // Clone the request with new body + proxyReq := originalReq.Clone(ctx) + proxyReq.Body = io.NopCloser(bytes.NewReader(newBodyBytes)) + proxyReq.ContentLength = int64(len(newBodyBytes)) + + // Parse the configured base URL + baseURL, err := url.Parse(p.config.BaseURL) + if err != nil { + return nil, fmt.Errorf("failed to parse base URL '%s': %w", p.config.BaseURL, err) + } + + // Determine the model to use + modelName := "gemini-1.5-pro-latest" // Default model + if anthropicReq.Model != "" { + // Map Anthropic models to Gemini models + if strings.Contains(anthropicReq.Model, "opus") || strings.Contains(anthropicReq.Model, "sonnet") { + modelName = "gemini-1.5-pro-latest" + } else if strings.Contains(anthropicReq.Model, "haiku") { + modelName = "gemini-1.5-flash-latest" + } + } + + // Update the destination URL for Gemini + proxyReq.URL.Scheme = baseURL.Scheme + proxyReq.URL.Host = baseURL.Host + proxyReq.URL.Path = fmt.Sprintf("/v1beta/models/%s:streamGenerateContent", modelName) + + // Add API key as query parameter + q := proxyReq.URL.Query() + q.Set("key", p.config.APIKey) + proxyReq.URL.RawQuery = q.Encode() + + // Update request headers + proxyReq.RequestURI = "" + proxyReq.Host = baseURL.Host + + // Remove Anthropic-specific headers + proxyReq.Header.Del("anthropic-version") + proxyReq.Header.Del("x-api-key") + + // Set Gemini headers + proxyReq.Header.Set("Content-Type", 
"application/json") + + // Execute the request + resp, err := p.client.Do(proxyReq) + if err != nil { + return nil, fmt.Errorf("failed to forward request to Gemini: %w", err) + } + + // Handle streaming response + if anthropicReq.Stream { + return p.handleStreamingResponse(resp) + } + + // Handle regular response + return p.handleRegularResponse(resp) +} + +func (p *GeminiProvider) handleStreamingResponse(geminiResp *http.Response) (*http.Response, error) { + // Read Gemini response + defer geminiResp.Body.Close() + + // Create a pipe for streaming + pr, pw := io.Pipe() + + // Create response with SSE headers + resp := &http.Response{ + StatusCode: geminiResp.StatusCode, + Header: make(http.Header), + Body: pr, + } + + // Set SSE headers + resp.Header.Set("Content-Type", "text/event-stream") + resp.Header.Set("Cache-Control", "no-cache") + resp.Header.Set("Connection", "keep-alive") + + // Start streaming conversion in a goroutine + go func() { + defer pw.Close() + + reader := bufio.NewReader(geminiResp.Body) + for { + line, err := reader.ReadBytes('\n') + if err != nil { + if err != io.EOF { + fmt.Fprintf(pw, "event: error\ndata: %s\n\n", err.Error()) + } + break + } + + // Parse Gemini streaming response and convert to Anthropic SSE format + if bytes.HasPrefix(line, []byte("data: ")) { + data := bytes.TrimPrefix(line, []byte("data: ")) + data = bytes.TrimSpace(data) + + if string(data) == "[DONE]" { + fmt.Fprintf(pw, "event: message_stop\ndata: {\"type\":\"message_stop\"}\n\n") + break + } + + var geminiChunk map[string]interface{} + if err := json.Unmarshal(data, &geminiChunk); err == nil { + // Convert Gemini chunk to Anthropic format + anthropicEvent := convertGeminiChunkToAnthropicEvent(geminiChunk) + eventData, _ := json.Marshal(anthropicEvent) + fmt.Fprintf(pw, "event: %s\ndata: %s\n\n", anthropicEvent["type"], eventData) + } + } + } + }() + + return resp, nil +} + +func (p *GeminiProvider) handleRegularResponse(geminiResp *http.Response) (*http.Response, 
error) { + defer geminiResp.Body.Close() + + // Handle compression + var reader io.Reader = geminiResp.Body + if geminiResp.Header.Get("Content-Encoding") == "gzip" { + gzReader, err := gzip.NewReader(geminiResp.Body) + if err != nil { + return nil, fmt.Errorf("failed to create gzip reader: %w", err) + } + defer gzReader.Close() + reader = gzReader + } + + bodyBytes, err := io.ReadAll(reader) + if err != nil { + return nil, fmt.Errorf("failed to read Gemini response: %w", err) + } + + // Parse Gemini response + var geminiResp2 map[string]interface{} + if err := json.Unmarshal(bodyBytes, &geminiResp2); err != nil { + return nil, fmt.Errorf("failed to parse Gemini response: %w", err) + } + + // Convert to Anthropic format + anthropicResp := convertGeminiToAnthropicResponse(geminiResp2) + newBodyBytes, err := json.Marshal(anthropicResp) + if err != nil { + return nil, fmt.Errorf("failed to marshal anthropic response: %w", err) + } + + // Create new response + resp := &http.Response{ + StatusCode: geminiResp.StatusCode, + Header: make(http.Header), + Body: io.NopCloser(bytes.NewReader(newBodyBytes)), + } + + // Copy relevant headers + resp.Header.Set("Content-Type", "application/json") + resp.Header.Set("Content-Length", fmt.Sprintf("%d", len(newBodyBytes))) + + return resp, nil +} + +func convertAnthropicToGemini(anthropicReq *model.AnthropicRequest) map[string]interface{} { + geminiReq := map[string]interface{}{ + "contents": []map[string]interface{}{}, + "generationConfig": map[string]interface{}{ + "temperature": anthropicReq.Temperature, + "maxOutputTokens": anthropicReq.MaxTokens, + }, + } + + // Convert messages + for _, msg := range anthropicReq.Messages { + role := "user" + if msg.Role == "assistant" { + role = "model" + } + + parts := []map[string]interface{}{} + + // Handle different content types + switch content := msg.Content.(type) { + case string: + parts = append(parts, map[string]interface{}{ + "text": content, + }) + case []interface{}: + for _, item 
// convertGeminiToAnthropicResponse reshapes a non-streaming Gemini
// generateContent response into the Anthropic Messages response schema.
// Only text parts and token counts are carried over; the model field is
// reported as a Claude model so Anthropic clients accept the payload.
func convertGeminiToAnthropicResponse(geminiResp map[string]interface{}) map[string]interface{} {
	anthropicResp := map[string]interface{}{
		"id":      fmt.Sprintf("msg_%d", time.Now().Unix()),
		"type":    "message",
		"role":    "assistant",
		"content": []map[string]interface{}{},
		"model":   "claude-3-5-sonnet-20241022", // masquerade as Claude for client compatibility
		// FIX: Anthropic message responses always include stop_reason and
		// stop_sequence; clients may rely on their presence.
		"stop_reason":   "end_turn",
		"stop_sequence": nil,
		"usage": map[string]interface{}{
			"input_tokens":  0,
			"output_tokens": 0,
		},
	}

	// Extract text parts from the first candidate, if any.
	if candidates, ok := geminiResp["candidates"].([]interface{}); ok && len(candidates) > 0 {
		if candidate, ok := candidates[0].(map[string]interface{}); ok {
			if content, ok := candidate["content"].(map[string]interface{}); ok {
				if parts, ok := content["parts"].([]interface{}); ok {
					for _, part := range parts {
						if partMap, ok := part.(map[string]interface{}); ok {
							if text, ok := partMap["text"].(string); ok {
								anthropicResp["content"] = append(anthropicResp["content"].([]map[string]interface{}), map[string]interface{}{
									"type": "text",
									"text": text,
								})
							}
						}
					}
				}
			}
			// FIX: translate Gemini's finishReason instead of dropping it.
			if fr, ok := candidate["finishReason"].(string); ok && fr == "MAX_TOKENS" {
				anthropicResp["stop_reason"] = "max_tokens"
			}
		}
	}

	// Carry token counts over when Gemini reports them.
	if usageMetadata, ok := geminiResp["usageMetadata"].(map[string]interface{}); ok {
		if promptTokens, ok := usageMetadata["promptTokenCount"].(float64); ok {
			anthropicResp["usage"].(map[string]interface{})["input_tokens"] = int(promptTokens)
		}
		if candidateTokens, ok := usageMetadata["candidatesTokenCount"].(float64); ok {
			anthropicResp["usage"].(map[string]interface{})["output_tokens"] = int(candidateTokens)
		}
	}

	return anthropicResp
}
// convertGeminiChunkToAnthropicEvent maps one streamed Gemini chunk onto an
// Anthropic-style SSE event. Text in the first candidate's first part becomes
// a content_block_delta; a non-empty finishReason is signalled as
// message_stop (mirroring the OpenRouter chunk converter in this package).
func convertGeminiChunkToAnthropicEvent(geminiChunk map[string]interface{}) map[string]interface{} {
	event := map[string]interface{}{
		"type": "content_block_delta",
		// FIX: Anthropic delta events identify which content block they
		// extend; this proxy only ever emits a single block (index 0).
		"index": 0,
		"delta": map[string]interface{}{
			"type": "text_delta",
			"text": "",
		},
	}

	if candidates, ok := geminiChunk["candidates"].([]interface{}); ok && len(candidates) > 0 {
		if candidate, ok := candidates[0].(map[string]interface{}); ok {
			if content, ok := candidate["content"].(map[string]interface{}); ok {
				if parts, ok := content["parts"].([]interface{}); ok && len(parts) > 0 {
					if part, ok := parts[0].(map[string]interface{}); ok {
						if text, ok := part["text"].(string); ok {
							event["delta"].(map[string]interface{})["text"] = text
						}
					}
				}
			}
			// CONSISTENCY FIX: the OpenRouter converter translates the
			// upstream finish signal to message_stop; do the same here so
			// Gemini streams also terminate cleanly for Anthropic clients.
			if fr, ok := candidate["finishReason"].(string); ok && fr != "" {
				event["type"] = "message_stop"
			}
		}
	}

	return event
}
&http.Client{ + Timeout: 300 * time.Second, // 5 minutes timeout + }, + config: cfg, + } +} + +func (p *OpenRouterProvider) Name() string { + return "openrouter" +} + +func (p *OpenRouterProvider) ForwardRequest(ctx context.Context, originalReq *http.Request) (*http.Response, error) { + // First, we need to convert the Anthropic request to OpenRouter format + bodyBytes, err := io.ReadAll(originalReq.Body) + if err != nil { + return nil, fmt.Errorf("failed to read request body: %w", err) + } + originalReq.Body = io.NopCloser(bytes.NewReader(bodyBytes)) + + var anthropicReq model.AnthropicRequest + if err := json.Unmarshal(bodyBytes, &anthropicReq); err != nil { + return nil, fmt.Errorf("failed to parse anthropic request: %w", err) + } + + // Convert to OpenRouter format (similar to OpenAI format) + openRouterReq := convertAnthropicToOpenRouter(&anthropicReq) + newBodyBytes, err := json.Marshal(openRouterReq) + if err != nil { + return nil, fmt.Errorf("failed to marshal openrouter request: %w", err) + } + + // Clone the request with new body + proxyReq := originalReq.Clone(ctx) + proxyReq.Body = io.NopCloser(bytes.NewReader(newBodyBytes)) + proxyReq.ContentLength = int64(len(newBodyBytes)) + + // Parse the configured base URL + baseURL, err := url.Parse(p.config.BaseURL) + if err != nil { + return nil, fmt.Errorf("failed to parse base URL '%s': %w", p.config.BaseURL, err) + } + + // Update the destination URL for OpenRouter + proxyReq.URL.Scheme = baseURL.Scheme + proxyReq.URL.Host = baseURL.Host + proxyReq.URL.Path = "/v1/chat/completions" // OpenRouter endpoint + + // Update request headers + proxyReq.RequestURI = "" + proxyReq.Host = baseURL.Host + + // Remove Anthropic-specific headers + proxyReq.Header.Del("anthropic-version") + proxyReq.Header.Del("x-api-key") + + // Set OpenRouter headers + proxyReq.Header.Set("Authorization", fmt.Sprintf("Bearer %s", p.config.APIKey)) + proxyReq.Header.Set("Content-Type", "application/json") + + // Optional: Set HTTP-Referer 
for OpenRouter analytics + proxyReq.Header.Set("HTTP-Referer", "https://github.com/seifghazi/claude-code-proxy") + + // Optional: Set X-Title for OpenRouter dashboard + proxyReq.Header.Set("X-Title", "Claude Code Proxy") + + // Execute the request + resp, err := p.client.Do(proxyReq) + if err != nil { + return nil, fmt.Errorf("failed to forward request to OpenRouter: %w", err) + } + + // Handle streaming response + if anthropicReq.Stream { + return p.handleStreamingResponse(resp) + } + + // Handle regular response + return p.handleRegularResponse(resp) +} + +func (p *OpenRouterProvider) handleStreamingResponse(openRouterResp *http.Response) (*http.Response, error) { + // Create a pipe for streaming + pr, pw := io.Pipe() + + // Create response with SSE headers + resp := &http.Response{ + StatusCode: openRouterResp.StatusCode, + Header: make(http.Header), + Body: pr, + } + + // Set SSE headers + resp.Header.Set("Content-Type", "text/event-stream") + resp.Header.Set("Cache-Control", "no-cache") + resp.Header.Set("Connection", "keep-alive") + + // Start streaming conversion in a goroutine + go func() { + defer pw.Close() + defer openRouterResp.Body.Close() + + scanner := bufio.NewScanner(openRouterResp.Body) + for scanner.Scan() { + line := scanner.Text() + + // OpenRouter uses SSE format similar to OpenAI + if strings.HasPrefix(line, "data: ") { + data := strings.TrimPrefix(line, "data: ") + data = strings.TrimSpace(data) + + if data == "[DONE]" { + // Send Anthropic-style stop event + fmt.Fprintf(pw, "event: message_stop\ndata: {\"type\":\"message_stop\"}\n\n") + break + } + + var openRouterChunk map[string]interface{} + if err := json.Unmarshal([]byte(data), &openRouterChunk); err == nil { + // Convert OpenRouter chunk to Anthropic format + anthropicEvent := convertOpenRouterChunkToAnthropicEvent(openRouterChunk) + eventData, _ := json.Marshal(anthropicEvent) + fmt.Fprintf(pw, "event: %s\ndata: %s\n\n", anthropicEvent["type"], eventData) + } + } + } + }() + + return 
resp, nil +} + +func (p *OpenRouterProvider) handleRegularResponse(openRouterResp *http.Response) (*http.Response, error) { + defer openRouterResp.Body.Close() + + // Handle compression + var reader io.Reader = openRouterResp.Body + if openRouterResp.Header.Get("Content-Encoding") == "gzip" { + gzReader, err := gzip.NewReader(openRouterResp.Body) + if err != nil { + return nil, fmt.Errorf("failed to create gzip reader: %w", err) + } + defer gzReader.Close() + reader = gzReader + } + + bodyBytes, err := io.ReadAll(reader) + if err != nil { + return nil, fmt.Errorf("failed to read OpenRouter response: %w", err) + } + + // Parse OpenRouter response + var openRouterResp2 map[string]interface{} + if err := json.Unmarshal(bodyBytes, &openRouterResp2); err != nil { + return nil, fmt.Errorf("failed to parse OpenRouter response: %w", err) + } + + // Convert to Anthropic format + anthropicResp := convertOpenRouterToAnthropicResponse(openRouterResp2) + newBodyBytes, err := json.Marshal(anthropicResp) + if err != nil { + return nil, fmt.Errorf("failed to marshal anthropic response: %w", err) + } + + // Create new response + resp := &http.Response{ + StatusCode: openRouterResp.StatusCode, + Header: make(http.Header), + Body: io.NopCloser(bytes.NewReader(newBodyBytes)), + } + + // Copy relevant headers + resp.Header.Set("Content-Type", "application/json") + resp.Header.Set("Content-Length", fmt.Sprintf("%d", len(newBodyBytes))) + + return resp, nil +} + +func convertAnthropicToOpenRouter(anthropicReq *model.AnthropicRequest) map[string]interface{} { + openRouterReq := map[string]interface{}{ + "model": anthropicReq.Model, + "messages": []map[string]interface{}{}, + "max_tokens": anthropicReq.MaxTokens, + "temperature": anthropicReq.Temperature, + "stream": anthropicReq.Stream, + } + + // Add system message if present + if len(anthropicReq.System) > 0 { + systemText := "" + for _, sysMsg := range anthropicReq.System { + systemText += sysMsg.Text + "\n" + } + if systemText != "" { 
// convertOpenRouterToAnthropicResponse reshapes an OpenAI-style chat
// completion from OpenRouter into the Anthropic Messages response schema.
// Only the first choice's text content and token counts are carried over;
// the model field is reported as a Claude model so Anthropic clients accept
// the payload.
func convertOpenRouterToAnthropicResponse(openRouterResp map[string]interface{}) map[string]interface{} {
	anthropicResp := map[string]interface{}{
		"id":      openRouterResp["id"],
		"type":    "message",
		"role":    "assistant",
		"content": []map[string]interface{}{},
		"model":   "claude-3-5-sonnet-20241022", // masquerade as Claude for client compatibility
		// FIX: Anthropic message responses always include stop_reason and
		// stop_sequence; clients may rely on their presence.
		"stop_reason":   "end_turn",
		"stop_sequence": nil,
		"usage": map[string]interface{}{
			"input_tokens":  0,
			"output_tokens": 0,
		},
	}

	// Extract the first choice's message text and finish reason.
	if choices, ok := openRouterResp["choices"].([]interface{}); ok && len(choices) > 0 {
		if choice, ok := choices[0].(map[string]interface{}); ok {
			if message, ok := choice["message"].(map[string]interface{}); ok {
				if content, ok := message["content"].(string); ok {
					anthropicResp["content"] = append(anthropicResp["content"].([]map[string]interface{}), map[string]interface{}{
						"type": "text",
						"text": content,
					})
				}
			}
			// FIX: translate the OpenAI-style finish_reason instead of
			// dropping it.
			if finishReason, ok := choice["finish_reason"].(string); ok {
				switch finishReason {
				case "length":
					anthropicResp["stop_reason"] = "max_tokens"
				case "tool_calls":
					anthropicResp["stop_reason"] = "tool_use"
				default:
					anthropicResp["stop_reason"] = "end_turn"
				}
			}
		}
	}

	// Carry token counts over when OpenRouter reports them.
	if usage, ok := openRouterResp["usage"].(map[string]interface{}); ok {
		if promptTokens, ok := usage["prompt_tokens"].(float64); ok {
			anthropicResp["usage"].(map[string]interface{})["input_tokens"] = int(promptTokens)
		}
		if completionTokens, ok := usage["completion_tokens"].(float64); ok {
			anthropicResp["usage"].(map[string]interface{})["output_tokens"] = int(completionTokens)
		}
	}

	return anthropicResp
}
// convertOpenRouterChunkToAnthropicEvent maps one streamed OpenAI-style
// chunk onto an Anthropic SSE event. Text deltas become content_block_delta
// events; a non-empty finish_reason turns the event into message_stop.
func convertOpenRouterChunkToAnthropicEvent(openRouterChunk map[string]interface{}) map[string]interface{} {
	event := map[string]interface{}{
		"type": "content_block_delta",
		// FIX: Anthropic delta events identify which content block they
		// extend; this proxy only ever emits a single block (index 0).
		"index": 0,
		"delta": map[string]interface{}{
			"type": "text_delta",
			"text": "",
		},
	}

	if choices, ok := openRouterChunk["choices"].([]interface{}); ok && len(choices) > 0 {
		if choice, ok := choices[0].(map[string]interface{}); ok {
			if delta, ok := choice["delta"].(map[string]interface{}); ok {
				if content, ok := delta["content"].(string); ok {
					event["delta"].(map[string]interface{})["text"] = content
				}
			}

			// A non-empty finish_reason marks the end of the stream.
			if finishReason, ok := choice["finish_reason"].(string); ok && finishReason != "" {
				event["type"] = "message_stop"
			}
		}
	}

	return event
}