1+ /*
2+ Copyright 2025 API Testing Authors.
3+
4+ Licensed under the Apache License, Version 2.0 (the "License");
5+ you may not use this file except in compliance with the License.
6+ You may obtain a copy of the License at
7+
8+ http://www.apache.org/licenses/LICENSE-2.0
9+
10+ Unless required by applicable law or agreed to in writing, software
11+ distributed under the License is distributed on an "AS IS" BASIS,
12+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+ See the License for the specific language governing permissions and
14+ limitations under the License.
15+ */
16+
17+ package testing
18+
19+ import (
20+ "context"
21+ "encoding/json"
22+ "fmt"
23+ "net/http"
24+ "os"
25+ "os/exec"
26+ "strings"
27+ "time"
28+ )
29+
// ModelDetector probes the local machine and environment for usable AI models.
// The zero value is not usable; construct instances with NewModelDetector.
type ModelDetector struct {
	// client performs the HTTP probes (e.g. against the Ollama daemon).
	client *http.Client
}
34+
35+ // NewModelDetector creates a new model detector
36+ func NewModelDetector () * ModelDetector {
37+ return & ModelDetector {
38+ client : & http.Client {
39+ Timeout : 5 * time .Second ,
40+ },
41+ }
42+ }
43+
// DetectedModel describes one AI model found on (or configured for) this host.
type DetectedModel struct {
	// Name is the model identifier, e.g. "gpt-4" or an Ollama tag.
	Name string `json:"name"`
	// Provider is the backend serving the model ("ollama", "openai", ...).
	Provider string `json:"provider"`
	// Size is a human-readable on-disk size, when known.
	Size string `json:"size,omitempty"`
	// Modified is the last-modified timestamp ("2006-01-02 15:04:05"), when known.
	Modified string `json:"modified,omitempty"`
	// Metadata carries provider-specific details such as format or family.
	Metadata map[string]string `json:"metadata,omitempty"`
}
52+
53+ // DetectModels detects all available AI models on the system
54+ func (d * ModelDetector ) DetectModels (ctx context.Context ) ([]DetectedModel , error ) {
55+ var models []DetectedModel
56+
57+ // Try to detect Ollama models
58+ ollamaModels , err := d .detectOllamaModels (ctx )
59+ if err == nil {
60+ models = append (models , ollamaModels ... )
61+ }
62+
63+ // Try to detect other local models (e.g., llama.cpp)
64+ localModels , err := d .detectLocalModels (ctx )
65+ if err == nil {
66+ models = append (models , localModels ... )
67+ }
68+
69+ // Check for configured cloud models (from environment variables)
70+ cloudModels := d .detectCloudModels ()
71+ models = append (models , cloudModels ... )
72+
73+ if len (models ) == 0 {
74+ return nil , fmt .Errorf ("no AI models detected on the system" )
75+ }
76+
77+ return models , nil
78+ }
79+
80+ // detectOllamaModels detects Ollama models if Ollama is running
81+ func (d * ModelDetector ) detectOllamaModels (ctx context.Context ) ([]DetectedModel , error ) {
82+ // First check if Ollama is running
83+ resp , err := d .client .Get ("http://localhost:11434/api/tags" )
84+ if err != nil {
85+ return nil , fmt .Errorf ("ollama not available: %w" , err )
86+ }
87+ defer resp .Body .Close ()
88+
89+ if resp .StatusCode != http .StatusOK {
90+ return nil , fmt .Errorf ("ollama API returned status %d" , resp .StatusCode )
91+ }
92+
93+ var result struct {
94+ Models []struct {
95+ Name string `json:"name"`
96+ ModifiedAt time.Time `json:"modified_at"`
97+ Size int64 `json:"size"`
98+ Details struct {
99+ Format string `json:"format"`
100+ Family string `json:"family"`
101+ ParameterSize string `json:"parameter_size"`
102+ QuantizationLevel string `json:"quantization_level"`
103+ } `json:"details"`
104+ } `json:"models"`
105+ }
106+
107+ if err := json .NewDecoder (resp .Body ).Decode (& result ); err != nil {
108+ return nil , fmt .Errorf ("failed to decode ollama response: %w" , err )
109+ }
110+
111+ var models []DetectedModel
112+ for _ , m := range result .Models {
113+ model := DetectedModel {
114+ Name : m .Name ,
115+ Provider : "ollama" ,
116+ Size : formatBytes (m .Size ),
117+ Modified : m .ModifiedAt .Format ("2006-01-02 15:04:05" ),
118+ Metadata : map [string ]string {
119+ "format" : m .Details .Format ,
120+ "family" : m .Details .Family ,
121+ "parameter_size" : m .Details .ParameterSize ,
122+ "quantization_level" : m .Details .QuantizationLevel ,
123+ },
124+ }
125+ models = append (models , model )
126+ }
127+
128+ return models , nil
129+ }
130+
131+ // detectLocalModels detects other local model files
132+ func (d * ModelDetector ) detectLocalModels (ctx context.Context ) ([]DetectedModel , error ) {
133+ var models []DetectedModel
134+
135+ // Check for llama.cpp models
136+ if output , err := exec .CommandContext (ctx , "which" , "llama" ).Output (); err == nil && len (output ) > 0 {
137+ // Try to list models if llama CLI is available
138+ // This is a placeholder - actual implementation depends on the local setup
139+ models = append (models , DetectedModel {
140+ Name : "llama-local" ,
141+ Provider : "llama.cpp" ,
142+ Metadata : map [string ]string {
143+ "type" : "local" ,
144+ },
145+ })
146+ }
147+
148+ return models , nil
149+ }
150+
151+ // detectCloudModels detects configured cloud AI models from environment
152+ func (d * ModelDetector ) detectCloudModels () []DetectedModel {
153+ var models []DetectedModel
154+
155+ // Check for OpenAI configuration
156+ if apiKey := getEnvAny ("OPENAI_API_KEY" , "AI_OPENAI_KEY" ); apiKey != "" {
157+ models = append (models , DetectedModel {
158+ Name : "gpt-4" ,
159+ Provider : "openai" ,
160+ Metadata : map [string ]string {
161+ "type" : "cloud" ,
162+ "auth" : "configured" ,
163+ },
164+ })
165+ models = append (models , DetectedModel {
166+ Name : "gpt-3.5-turbo" ,
167+ Provider : "openai" ,
168+ Metadata : map [string ]string {
169+ "type" : "cloud" ,
170+ "auth" : "configured" ,
171+ },
172+ })
173+ }
174+
175+ // Check for Anthropic configuration
176+ if apiKey := getEnvAny ("ANTHROPIC_API_KEY" , "AI_ANTHROPIC_KEY" ); apiKey != "" {
177+ models = append (models , DetectedModel {
178+ Name : "claude-3-opus" ,
179+ Provider : "anthropic" ,
180+ Metadata : map [string ]string {
181+ "type" : "cloud" ,
182+ "auth" : "configured" ,
183+ },
184+ })
185+ models = append (models , DetectedModel {
186+ Name : "claude-3-sonnet" ,
187+ Provider : "anthropic" ,
188+ Metadata : map [string ]string {
189+ "type" : "cloud" ,
190+ "auth" : "configured" ,
191+ },
192+ })
193+ }
194+
195+ return models
196+ }
197+
198+ // GetDefaultModel returns the best available model based on detection
199+ func (d * ModelDetector ) GetDefaultModel (ctx context.Context ) (* DetectedModel , error ) {
200+ models , err := d .DetectModels (ctx )
201+ if err != nil {
202+ return nil , err
203+ }
204+
205+ if len (models ) == 0 {
206+ return nil , fmt .Errorf ("no models available" )
207+ }
208+
209+ // Preference order: Local models first (privacy), then cloud
210+ // 1. Prefer Ollama models
211+ for _ , m := range models {
212+ if m .Provider == "ollama" {
213+ return & m , nil
214+ }
215+ }
216+
217+ // 2. Then local models
218+ for _ , m := range models {
219+ if m .Provider == "llama.cpp" {
220+ return & m , nil
221+ }
222+ }
223+
224+ // 3. Finally cloud models
225+ return & models [0 ], nil
226+ }
227+
// formatBytes renders a byte count as a human-readable binary-unit string,
// e.g. 1536 -> "1.5 KB", using 1024-based units up to exabytes.
func formatBytes(n int64) string {
	const unit = 1024
	if n < unit {
		return fmt.Sprintf("%d B", n)
	}
	// Find the largest power of 1024 that leaves the quotient below 1024.
	idx, div := 0, int64(unit)
	for n/div >= unit {
		div *= unit
		idx++
	}
	return fmt.Sprintf("%.1f %cB", float64(n)/float64(div), "KMGTPE"[idx])
}
241+
// getEnvAny returns the first environment variable among keys whose value is
// non-empty after trimming surrounding whitespace, or "" when none is set.
func getEnvAny(keys ...string) string {
	for _, key := range keys {
		value := strings.TrimSpace(os.Getenv(key))
		if value != "" {
			return value
		}
	}
	return ""
}