Skip to content

Commit 9786a85

Browse files
committed
fix: improve LLM provider support and detection accuracy
- Remove Anthropic provider (incomplete implementation)
- Use Claude via OpenRouter instead (anthropic/claude-sonnet-4.5)
- Show active model in Settings screen for better visibility
- Increase confidence to 1.0 for clearly safe inputs
- Fix OpenAI temperature parameter (now 1 instead of 0)
- Clean up version command output
1 parent 139f692 commit 9786a85

File tree

5 files changed

+28
-40
lines changed

5 files changed

+28
-40
lines changed

README.md

Lines changed: 2 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -147,13 +147,7 @@ guard := detector.New(
147147
detector.WithLLM(judge, detector.LLMConditional),
148148
)
149149

150-
// Anthropic - use any model (claude-3-opus, claude-3-sonnet, etc.)
151-
judge := detector.NewAnthropicJudge(apiKey, "claude-3-opus-20240229")
152-
guard := detector.New(
153-
detector.WithLLM(judge, detector.LLMAlways),
154-
)
155-
156-
// OpenRouter - use any provider/model combo
150+
// OpenRouter - use any provider/model combo (including Claude via anthropic/...)
157151
judge := detector.NewOpenRouterJudge(apiKey, "anthropic/claude-sonnet-4.5")
158152
guard := detector.New(
159153
detector.WithLLM(judge, detector.LLMConditional),
@@ -192,11 +186,8 @@ Create a `.env` file in your project directory:
192186
OPENAI_API_KEY=sk-...
193187
OPENAI_MODEL=gpt-5
194188

195-
# Anthropic (defaults to claude-sonnet-4-5-20250929 if not set)
196-
ANTHROPIC_API_KEY=sk-ant-...
197-
ANTHROPIC_MODEL=claude-sonnet-4-5-20250929
198-
199189
# OpenRouter (defaults to anthropic/claude-sonnet-4.5 if not set)
190+
# Provides access to Claude, Gemini, and other models
200191
OPENROUTER_API_KEY=sk-or-...
201192
OPENROUTER_MODEL=anthropic/claude-sonnet-4.5
202193

cmd/go-promptguard/interactive.go

Lines changed: 23 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -130,12 +130,6 @@ func initialModel() model {
130130
defaultProvider = "openrouter"
131131
}
132132
}
133-
if os.Getenv("ANTHROPIC_API_KEY") != "" {
134-
availableProviders = append(availableProviders, "anthropic")
135-
if defaultProvider == "none" {
136-
defaultProvider = "anthropic"
137-
}
138-
}
139133
availableProviders = append(availableProviders, "ollama")
140134
if defaultProvider == "none" {
141135
defaultProvider = "ollama"
@@ -212,12 +206,6 @@ func (m *model) updateGuard() {
212206
model = "anthropic/claude-sonnet-4.5"
213207
}
214208
judge = detector.NewOpenRouterJudge(os.Getenv("OPENROUTER_API_KEY"), model)
215-
case "anthropic":
216-
model := os.Getenv("ANTHROPIC_MODEL")
217-
if model == "" {
218-
model = "claude-sonnet-4-5-20250929"
219-
}
220-
judge = detector.NewAnthropicJudge(os.Getenv("ANTHROPIC_API_KEY"), model)
221209
case "ollama":
222210
ollamaHost := os.Getenv("OLLAMA_HOST")
223211
if ollamaHost == "" {
@@ -863,6 +851,29 @@ func (m model) viewSettings() string {
863851
}
864852
content.WriteString(selector(10, fmt.Sprintf("[0] Provider %s", providerDisplay)) + "\n")
865853

854+
// Show active model for current provider
855+
if m.llmProvider != "none" {
856+
var modelName string
857+
switch m.llmProvider {
858+
case "openai":
859+
modelName = os.Getenv("OPENAI_MODEL")
860+
if modelName == "" {
861+
modelName = "gpt-5 (default)"
862+
}
863+
case "openrouter":
864+
modelName = os.Getenv("OPENROUTER_MODEL")
865+
if modelName == "" {
866+
modelName = "anthropic/claude-sonnet-4.5 (default)"
867+
}
868+
case "ollama":
869+
modelName = os.Getenv("OLLAMA_MODEL")
870+
if modelName == "" {
871+
modelName = "llama3.1:8b (default)"
872+
}
873+
}
874+
content.WriteString(lipgloss.NewStyle().Foreground(mutedColor).Render(fmt.Sprintf(" Model: %s\n", modelName)))
875+
}
876+
866877
if len(m.availableProviders) > 1 {
867878
capitalizedProviders := make([]string, len(m.availableProviders))
868879
for i, p := range m.availableProviders {
@@ -997,8 +1008,6 @@ func capitalizeProviderName(p string) string {
9971008
return "OpenAI"
9981009
case "openrouter":
9991010
return "OpenRouter"
1000-
case "anthropic":
1001-
return "Anthropic"
10021011
case "ollama":
10031012
return "Ollama"
10041013
case "none":

detector/llm_constructors.go

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -47,15 +47,3 @@ func NewOllamaJudgeWithEndpoint(endpoint, model string, opts ...LLMJudgeOption)
4747
)
4848
}
4949

50-
// NewAnthropicJudge creates an LLM judge for Anthropic API.
51-
// Note: Anthropic's API format differs slightly from OpenAI.
52-
// Consider using OpenRouter for Anthropic models instead.
53-
// Recommended models: claude-sonnet-4-5-20250929, claude-haiku-4-5-20251001
54-
func NewAnthropicJudge(apiKey, model string, opts ...LLMJudgeOption) LLMJudge {
55-
return NewGenericLLMJudge(
56-
"https://api.anthropic.com/v1/messages",
57-
apiKey,
58-
model,
59-
opts...,
60-
)
61-
}

detector/llm_generic.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ func (j *GenericLLMJudge) Judge(ctx context.Context, input string) (LLMResult, e
8686
{"role": "system", "content": j.systemPrompt},
8787
{"role": "user", "content": buildUserPrompt(input)},
8888
},
89-
"temperature": 0, //* Deterministic output
89+
"temperature": 1,
9090
}
9191

9292
//* If structured mode, request JSON response

detector/multi_detector.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -140,7 +140,7 @@ func (md *MultiDetector) Detect(ctx context.Context, input string) Result {
140140
} else {
141141
// No detections after checking all detectors = high confidence it's safe
142142
// More detectors enabled = higher confidence
143-
finalConfidence = 0.85 + (0.05 * float64(len(md.detectors)) / 7.0)
143+
finalConfidence = 0.95 + (0.05 * float64(len(md.detectors)) / 7.0)
144144
if finalConfidence > 1.0 {
145145
finalConfidence = 1.0
146146
}
@@ -199,7 +199,7 @@ func (md *MultiDetector) Detect(ctx context.Context, input string) Result {
199199
}
200200
} else {
201201
// Still no detections even after LLM check = very high confidence it's safe
202-
finalConfidence = 0.90
202+
finalConfidence = 1.0
203203
}
204204
}
205205

0 commit comments

Comments
 (0)