
Commit c57c1a3

fix(security): [2.1] Fix PATH variable security issue in model detector
- Replace the exec.CommandContext invocation of 'which', which relies on the PATH variable, with fixed-path checks
- Use fixed, secure paths to check for the llama binary in common installation directories
- Only check predefined, unwritable system directories: /usr/local/bin, /usr/bin, /opt/homebrew/bin, /opt/local/bin
- Remove the dependency on the os/exec package to eliminate PATH-based security risks
- Add binary_path metadata to track which secure path was used
- Fix a test compilation error by referencing the detector variable

Task: 2.1 - Fix PATH variable security vulnerability
Phase: Security
1 parent a550626 commit c57c1a3

File tree

2 files changed: +23 -12 lines changed


pkg/testing/model_detector.go

Lines changed: 22 additions & 12 deletions
@@ -22,7 +22,6 @@ import (
     "fmt"
     "net/http"
     "os"
-    "os/exec"
     "strings"
     "time"
 )

@@ -132,17 +131,28 @@ func (d *ModelDetector) detectOllamaModels(ctx context.Context) ([]DetectedModel
 func (d *ModelDetector) detectLocalModels(ctx context.Context) ([]DetectedModel, error) {
     var models []DetectedModel

-    // Check for llama.cpp models
-    if output, err := exec.CommandContext(ctx, "which", "llama").Output(); err == nil && len(output) > 0 {
-        // Try to list models if llama CLI is available
-        // This is a placeholder - actual implementation depends on the local setup
-        models = append(models, DetectedModel{
-            Name:     "llama-local",
-            Provider: "llama.cpp",
-            Metadata: map[string]string{
-                "type": "local",
-            },
-        })
+    // Check for llama.cpp models in common installation directories
+    // Use fixed, secure paths instead of relying on PATH variable
+    llamaPaths := []string{
+        "/usr/local/bin/llama",
+        "/usr/bin/llama",
+        "/opt/homebrew/bin/llama",
+        "/opt/local/bin/llama",
+    }
+
+    for _, llamaPath := range llamaPaths {
+        if _, err := os.Stat(llamaPath); err == nil {
+            // Found llama binary in a secure location
+            models = append(models, DetectedModel{
+                Name:     "llama-local",
+                Provider: "llama.cpp",
+                Metadata: map[string]string{
+                    "type":        "local",
+                    "binary_path": llamaPath,
+                },
+            })
+            break // Only add once if found
+        }
     }

     return models, nil
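
The fixed-path check above can also be read as a general pattern: resolve a binary only from a predefined allowlist of root-owned directories, never from the PATH variable. A minimal sketch of that pattern as a reusable helper follows; it is not part of this commit, and the findBinaryInSecurePaths name and the detect package name are assumptions made for illustration.

// Sketch only (not in this commit): a generalized form of the fixed-path
// lookup added in detectLocalModels above.
package detect // hypothetical package name

import (
    "os"
    "path/filepath"
)

// securePathDirs mirrors the predefined, unwritable system directories
// checked in model_detector.go.
var securePathDirs = []string{
    "/usr/local/bin",
    "/usr/bin",
    "/opt/homebrew/bin",
    "/opt/local/bin",
}

// findBinaryInSecurePaths returns the first existing, non-directory path to
// the named binary among the fixed directories, and whether one was found.
// It never consults the PATH variable.
func findBinaryInSecurePaths(name string) (string, bool) {
    for _, dir := range securePathDirs {
        candidate := filepath.Join(dir, name)
        if info, err := os.Stat(candidate); err == nil && !info.IsDir() {
            return candidate, true
        }
    }
    return "", false
}

With a helper like this, the loop in detectLocalModels reduces to a single call such as findBinaryInSecurePaths("llama"), and any future local-runtime detectors could share the same allowlist.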

pkg/testing/model_detector_test.go

Lines changed: 1 addition & 0 deletions
@@ -50,6 +50,7 @@ func TestModelDetector_DetectOllamaModels(t *testing.T) {
     detector := NewModelDetector()
     // Note: In real test, we would need to mock the localhost:11434 endpoint
     // This is just for demonstration
+    _ = detector // Use the detector variable to avoid "declared and not used" error
 }

 func TestModelDetector_DetectCloudModels(t *testing.T) {
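
The in-test note about mocking the localhost:11434 endpoint can be made concrete with net/http/httptest. The sketch below is not part of this commit: it assumes the detector could accept a configurable base URL (the NewModelDetectorWithBaseURL constructor is hypothetical) and that detectOllamaModels parses a payload shaped like Ollama's /api/tags response.

// Sketch only (not in this commit): exercising the Ollama detection path
// against a local httptest server instead of the real localhost:11434.
package testing // assumed package name for pkg/testing; adjust to the actual one

import (
    "context"
    "fmt"
    "net/http"
    "net/http/httptest"
    "testing"
)

func TestModelDetector_DetectOllamaModels_Mocked(t *testing.T) {
    // Serve a minimal response shaped like Ollama's /api/tags payload.
    srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        w.Header().Set("Content-Type", "application/json")
        fmt.Fprint(w, `{"models":[{"name":"llama3:8b"}]}`)
    }))
    defer srv.Close()

    // Hypothetical constructor; the committed NewModelDetector takes no base URL.
    detector := NewModelDetectorWithBaseURL(srv.URL)

    models, err := detector.detectOllamaModels(context.Background())
    if err != nil {
        t.Fatalf("detectOllamaModels returned error: %v", err)
    }
    if len(models) == 0 {
        t.Fatal("expected at least one detected model from the mocked endpoint")
    }
}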
