Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 6 additions & 4 deletions bifrost/bifrost.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ const (
type LLM struct {
client *bifrostcore.Bifrost
provider schemas.ModelProvider
apiKey string // used only to redact configured secrets from provider error surfaces
defaultModel string
Comment on lines +36 to 37
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Don't drop Bedrock credentials from the redaction set.

LLM now retains only cfg.APIKey, but this config also accepts AWSAccessKeyID and AWSSecretAccessKey. Those never reach llm.SanitizeProviderError, so a Bedrock auth error that echoes either value will still leak it through the new streamed error paths. Please carry a full secret list here instead of a single API key.

🔐 Suggested direction
 type LLM struct {
 	client           *bifrostcore.Bifrost
 	provider         schemas.ModelProvider
-	apiKey           string // used only to redact configured secrets from provider error surfaces
+	redactionSecrets []string
 	defaultModel     string
 	inputTokenLimit  int
 	outputTokenLimit int
 	streamingTimeout time.Duration
@@
 	return &LLM{
 		client:             client,
 		provider:           cfg.Provider,
-		apiKey:             cfg.APIKey,
+		redactionSecrets: []string{
+			cfg.APIKey,
+			cfg.AWSAccessKeyID,
+			cfg.AWSSecretAccessKey,
+		},
 		defaultModel:       cfg.DefaultModel,
 		inputTokenLimit:    cfg.InputTokenLimit,

Then widen the llm.SanitizeProviderError(...) helpers to accept ...string and pass b.redactionSecrets... at the call sites.

Also applies to: 200-200

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@bifrost/bifrost.go` around lines 36-37, the LLM struct currently stores
only apiKey, so the AWS credentials are omitted from redaction. Add a field
such as redactionSecrets []string to the LLM struct, populate it from
cfg.APIKey, cfg.AWSAccessKeyID, and cfg.AWSSecretAccessKey when constructing
the struct, and remove the apiKey-only usage. Then update
llm.SanitizeProviderError to accept variadic secrets (...string) and change
all call sites (wherever SanitizeProviderError(...) is invoked) to pass
b.redactionSecrets... so Bedrock/AWS credential values are included in the
redaction set.

inputTokenLimit int
outputTokenLimit int
Expand Down Expand Up @@ -196,6 +197,7 @@ func New(cfg Config) (*LLM, error) {
return &LLM{
client: client,
provider: cfg.Provider,
apiKey: cfg.APIKey,
defaultModel: cfg.DefaultModel,
inputTokenLimit: cfg.InputTokenLimit,
outputTokenLimit: outputLimit,
Expand Down Expand Up @@ -296,7 +298,7 @@ func (b *LLM) streamChat(request llm.CompletionRequest, cfg llm.LanguageModelCon
if bifrostErr != nil {
output <- llm.TextStreamEvent{
Type: llm.EventTypeError,
Value: fmt.Errorf("bifrost error: %s", bifrostErr.Error.Message),
Value: llm.SanitizeProviderError(fmt.Errorf("bifrost error: %s", bifrostErr.Error.Message), b.apiKey),
}
return
}
Expand Down Expand Up @@ -348,7 +350,7 @@ func (b *LLM) streamChat(request llm.CompletionRequest, cfg llm.LanguageModelCon
if chunk.BifrostError != nil {
output <- llm.TextStreamEvent{
Type: llm.EventTypeError,
Value: fmt.Errorf("stream error: %s", chunk.BifrostError.Error.Message),
Value: llm.SanitizeProviderError(fmt.Errorf("stream error: %s", chunk.BifrostError.Error.Message), b.apiKey),
}
return
}
Expand Down Expand Up @@ -1229,7 +1231,7 @@ func (b *LLM) streamResponses(request llm.CompletionRequest, cfg llm.LanguageMod
if bifrostErr != nil {
output <- llm.TextStreamEvent{
Type: llm.EventTypeError,
Value: fmt.Errorf("bifrost error: %s", bifrostErr.Error.Message),
Value: llm.SanitizeProviderError(fmt.Errorf("bifrost error: %s", bifrostErr.Error.Message), b.apiKey),
}
return
}
Expand Down Expand Up @@ -1287,7 +1289,7 @@ func (b *LLM) streamResponses(request llm.CompletionRequest, cfg llm.LanguageMod
if chunk.BifrostError != nil {
output <- llm.TextStreamEvent{
Type: llm.EventTypeError,
Value: fmt.Errorf("stream error: %s", chunk.BifrostError.Error.Message),
Value: llm.SanitizeProviderError(fmt.Errorf("stream error: %s", chunk.BifrostError.Error.Message), b.apiKey),
}
return
}
Expand Down
7 changes: 5 additions & 2 deletions bifrost/embeddings.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,14 @@ import (
"github.com/maximhq/bifrost/core/schemas"

"github.com/mattermost/mattermost-plugin-ai/embeddings"
"github.com/mattermost/mattermost-plugin-ai/llm"
)

// EmbeddingProvider implements the embeddings.EmbeddingProvider interface using Bifrost.
type EmbeddingProvider struct {
client *bifrostcore.Bifrost
provider schemas.ModelProvider
apiKey string // used only to redact configured secrets from provider error surfaces
model string
dimensions int
}
Expand Down Expand Up @@ -50,6 +52,7 @@ func NewEmbeddingProvider(cfg EmbeddingConfig) (*EmbeddingProvider, error) {
return &EmbeddingProvider{
client: client,
provider: cfg.Provider,
apiKey: cfg.APIKey,
model: cfg.Model,
dimensions: cfg.Dimensions,
}, nil
Expand All @@ -74,7 +77,7 @@ func (p *EmbeddingProvider) CreateEmbedding(ctx context.Context, text string) ([

resp, bifrostErr := p.client.EmbeddingRequest(bifrostCtx, req)
if bifrostErr != nil {
return nil, fmt.Errorf("bifrost embedding error: %s", bifrostErr.Error.Message)
return nil, llm.SanitizeProviderError(fmt.Errorf("bifrost embedding error: %s", bifrostErr.Error.Message), p.apiKey)
}

if resp == nil || len(resp.Data) == 0 {
Expand Down Expand Up @@ -109,7 +112,7 @@ func (p *EmbeddingProvider) BatchCreateEmbeddings(ctx context.Context, texts []s

resp, bifrostErr := p.client.EmbeddingRequest(bifrostCtx, req)
if bifrostErr != nil {
return nil, fmt.Errorf("bifrost batch embedding error: %s", bifrostErr.Error.Message)
return nil, llm.SanitizeProviderError(fmt.Errorf("bifrost batch embedding error: %s", bifrostErr.Error.Message), p.apiKey)
}

if resp == nil || len(resp.Data) == 0 {
Expand Down
2 changes: 1 addition & 1 deletion bifrost/models.go
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ func FetchModels(cfg FetchModelsConfig) ([]llm.ModelInfo, error) {

resp, bifrostErr := client.ListAllModels(bifrostCtx, req)
if bifrostErr != nil {
return nil, fmt.Errorf("bifrost list models error: %s", bifrostErr.Error.Message)
return nil, llm.SanitizeProviderError(fmt.Errorf("bifrost list models error: %s", bifrostErr.Error.Message), cfg.APIKey)
}

if resp == nil {
Expand Down
5 changes: 4 additions & 1 deletion bifrost/transcription.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,15 @@ import (
bifrostcore "github.com/maximhq/bifrost/core"
"github.com/maximhq/bifrost/core/schemas"

"github.com/mattermost/mattermost-plugin-ai/llm"
"github.com/mattermost/mattermost-plugin-ai/subtitles"
)

// Transcriber implements transcription using the Bifrost gateway.
type Transcriber struct {
client *bifrostcore.Bifrost
provider schemas.ModelProvider
apiKey string // used only to redact configured secrets from provider error surfaces
model string
}

Expand Down Expand Up @@ -56,6 +58,7 @@ func NewTranscriber(cfg TranscriptionConfig) (*Transcriber, error) {
return &Transcriber{
client: client,
provider: cfg.Provider,
apiKey: cfg.APIKey,
model: model,
}, nil
}
Expand Down Expand Up @@ -84,7 +87,7 @@ func (t *Transcriber) Transcribe(file io.Reader) (*subtitles.Subtitles, error) {

resp, bifrostErr := t.client.TranscriptionRequest(bifrostCtx, req)
if bifrostErr != nil {
return nil, fmt.Errorf("bifrost transcription error: %s", bifrostErr.Error.Message)
return nil, llm.SanitizeProviderError(fmt.Errorf("bifrost transcription error: %s", bifrostErr.Error.Message), t.apiKey)
}

if resp == nil || resp.Text == "" {
Expand Down
77 changes: 77 additions & 0 deletions llm/provider_error.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.

package llm

import (
"regexp"
"strings"
)

// providerErrorRedacted is the placeholder substituted for any secret material
// found in a provider error message.
const providerErrorRedacted = "[REDACTED]"

// Patterns matching secret material that providers are known to echo back in
// error payloads. Compiled once at package scope so no per-call compilation
// cost is paid on the error path.
var (
	// "Authorization: Bearer <token>" echoed in an error body (case-insensitive).
	openAIAuthHeaderPattern = regexp.MustCompile(`(?i)(Authorization:\s*Bearer\s+)(\S+)`)
	// JSON fields named "apiKey" or "api_key" with a quoted value.
	openAIJSONAPIKeyPattern = regexp.MustCompile(`(?i)("api(?:_|)key"\s*:\s*")([^"]+)(")`)
	// OpenAI's "Incorrect API key provided: <key>..." message; the optional
	// non-capturing group holds the echoed key, which the replacement drops.
	openAIIncorrectKeyPattern = regexp.MustCompile(`(?i)(Incorrect API key provided)(?::\s*[^"\r\n]+?)?(\.?\s+You can find your API key|["\r\n]|$)`)
	// Bare OpenAI secret-key tokens (sk-... / sk-proj-...).
	openAIKeyPattern = regexp.MustCompile(`\bsk(?:-proj)?-[A-Za-z0-9_-]{10,}\b`)
	// Bare Anthropic secret-key tokens (sk-ant-...).
	anthropicKeyPattern = regexp.MustCompile(`\bsk-ant-[A-Za-z0-9_-]{20,}\b`)
)

// SanitizedProviderError wraps an upstream LLM error after redacting secrets from its message.
// It implements [errors.Unwrap] so [errors.Is] / [errors.As] chains on the original error are preserved.
type SanitizedProviderError struct {
	message string // redacted form of the original error text; what Error reports
	err     error  // original, unredacted error, retained only for errors.Is / errors.As
}

// Error returns the redacted message. The original (potentially secret-bearing)
// text is never exposed through this method.
func (e *SanitizedProviderError) Error() string {
	return e.message
}

// Unwrap exposes the original error so sentinel and typed-error checks on the
// wrapped chain keep working.
func (e *SanitizedProviderError) Unwrap() error {
	return e.err
}

// SanitizeProviderErrorMessage applies the same redaction rules as
// [SanitizeProviderError] to a plain string. Each non-empty entry of
// configuredSecrets is additionally redacted when it appears as a substring
// (word-boundary safe).
//
// The secrets parameter is variadic so existing call sites that pass a single
// API key keep compiling unchanged, while providers with several credentials
// (e.g. Bedrock's AWS access key ID plus secret access key) can pass their
// full secret set.
func SanitizeProviderErrorMessage(message string, configuredSecrets ...string) string {
	sanitized := sanitizeProviderErrorMessagePlain(message)
	for _, secret := range configuredSecrets {
		secret = strings.TrimSpace(secret)
		if secret == "" {
			// Nothing configured for this slot; never "redact" the empty string.
			continue
		}
		sanitized = replaceConfiguredAPIKeyInMessage(sanitized, secret)
	}
	return sanitized
}

// SanitizeProviderError redacts API keys, bearer tokens, and similar material from provider errors
// before those strings are logged, streamed to clients, or returned to callers.
//
// It accepts any number of configured secrets (API key, AWS access key ID,
// AWS secret access key, ...) so providers with multiple credentials have all
// of them scrubbed — not only a single API key. Existing single-key call
// sites remain source-compatible.
//
// If nothing needed redacting, the original error is returned unchanged;
// otherwise the result wraps err so errors.Is / errors.As chains still work.
func SanitizeProviderError(err error, configuredSecrets ...string) error {
	if err == nil {
		return nil
	}

	// Run the pattern-based rules at least once even with no secrets, then
	// scrub each configured secret. The message-level sanitizer is idempotent,
	// so applying it once per secret is safe.
	sanitized := err.Error()
	if len(configuredSecrets) == 0 {
		sanitized = SanitizeProviderErrorMessage(sanitized, "")
	}
	for _, secret := range configuredSecrets {
		sanitized = SanitizeProviderErrorMessage(sanitized, secret)
	}

	if sanitized == err.Error() {
		return err
	}

	return &SanitizedProviderError{
		message: sanitized,
		err:     err,
	}
}

// sanitizeProviderErrorMessagePlain strips provider-specific secret material —
// bearer headers, JSON api-key fields, "Incorrect API key provided: ..."
// echoes, and bare OpenAI/Anthropic key tokens — from message, then removes
// non-printable characters. The redaction steps run in a fixed order.
func sanitizeProviderErrorMessagePlain(message string) string {
	steps := []struct {
		re          *regexp.Regexp
		replacement string
	}{
		{openAIAuthHeaderPattern, `${1}` + providerErrorRedacted},
		{openAIJSONAPIKeyPattern, `${1}` + providerErrorRedacted + `${3}`},
		// Drop only the echoed key; keep the surrounding guidance text.
		{openAIIncorrectKeyPattern, `${1}${2}`},
		{openAIKeyPattern, providerErrorRedacted},
		{anthropicKeyPattern, providerErrorRedacted},
	}

	out := message
	for _, step := range steps {
		out = step.re.ReplaceAllString(out, step.replacement)
	}
	return SanitizeNonPrintableChars(out)
}

func replaceConfiguredAPIKeyInMessage(message string, apiKey string) string {
pattern := regexp.MustCompile(`(^|[^A-Za-z0-9_-])(` + regexp.QuoteMeta(apiKey) + `)([^A-Za-z0-9_-]|$)`)
return pattern.ReplaceAllString(message, `${1}`+providerErrorRedacted+`${3}`)
}
135 changes: 135 additions & 0 deletions llm/provider_error_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
// Copyright (c) 2023-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.

package llm

import (
"errors"
"fmt"
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

// errStreamingTimeout is a fixture error containing no secret material; used
// to verify that sanitization returns unrelated errors completely untouched.
var errStreamingTimeout = errors.New("timeout streaming")

func TestSanitizeProviderError(t *testing.T) {
	// When nothing needs redacting, the exact same error instance must come
	// back — no pointless wrapping that would break identity comparisons.
	t.Run("preserves unrelated errors", func(t *testing.T) {
		sanitizedErr := SanitizeProviderError(errStreamingTimeout, "")

		assert.Same(t, errStreamingTimeout, sanitizedErr)
	})

	// Table-driven coverage of the pattern-based rules: OpenAI "Incorrect API
	// key" echoes (plain and progressively masked), Authorization headers,
	// bare OpenAI/Anthropic key tokens, and JSON apiKey fields.
	t.Run("redacts auth material from provider errors", func(t *testing.T) {
		configuredKey := "this-is-my-disclosed-api-key"

		tests := []struct {
			name            string   // subtest label
			input           string   // raw provider error message
			wantContains    string   // fragment that must survive sanitization
			wantNotContains []string // secret material that must be gone
		}{
			{
				name:         "incorrect api key message",
				input:        `{"error":{"message":"Incorrect API key provided: this-is-my-disclosed-api-key. You can find your API key at https://platform.openai.com/account/api-keys.","type":"invalid_request_error","code":"invalid_api_key"}}`,
				wantContains: `Incorrect API key provided. You can find your API key`,
				wantNotContains: []string{
					"this-is-my-disclosed-api-key",
				},
			},
			{
				// Providers sometimes mask the middle of the key themselves;
				// the partially masked echo must still be removed.
				name:         "progressively masked key",
				input:        `{"error":{"message":"Incorrect API key provided: this-is-****************-key. You can find your API key at https://platform.openai.com/account/api-keys.","type":"invalid_request_error","code":"invalid_api_key"}}`,
				wantContains: `Incorrect API key provided. You can find your API key`,
				wantNotContains: []string{
					"this-is-****************-key",
				},
			},
			{
				name:         "authorization header",
				input:        `upstream failure: Authorization: Bearer sk-proj-1234567890abcdefghijklmnop`,
				wantContains: `Authorization: Bearer [REDACTED]`,
				wantNotContains: []string{
					"sk-proj-1234567890abcdefghijklmnop",
				},
			},
			{
				name:         "standalone openai key token",
				input:        `provider error: leaked sk-1234567890abcdefghij token`,
				wantContains: `provider error: leaked [REDACTED] token`,
				wantNotContains: []string{
					"sk-1234567890abcdefghij",
				},
			},
			{
				name:         "standalone anthropic key token",
				input:        `provider error: leaked sk-ant-1234567890abcdefghijklmnop`,
				wantContains: `provider error: leaked [REDACTED]`,
				wantNotContains: []string{
					"sk-ant-1234567890abcdefghijklmnop",
				},
			},
			{
				name:         "json api key field",
				input:        `{"apiKey":"this-is-my-disclosed-api-key","detail":"request failed"}`,
				wantContains: `"apiKey":"[REDACTED]"`,
				wantNotContains: []string{
					"this-is-my-disclosed-api-key",
				},
			},
		}

		for _, tt := range tests {
			t.Run(tt.name, func(t *testing.T) {
				sanitizedErr := SanitizeProviderError(errors.New(tt.input), configuredKey)
				require.NotNil(t, sanitizedErr)
				assert.Contains(t, sanitizedErr.Error(), tt.wantContains)
				for _, secret := range tt.wantNotContains {
					assert.NotContains(t, sanitizedErr.Error(), secret)
				}
			})
		}
	})

	// Keys too short for the sk-* token patterns must still be removed via
	// the configured-key substring redaction.
	t.Run("redacts short configured api keys", func(t *testing.T) {
		sanitizedErr := SanitizeProviderError(errors.New(`provider error: short`), "short")
		require.NotNil(t, sanitizedErr)
		assert.Equal(t, "provider error: [REDACTED]", sanitizedErr.Error())
	})

	// Degenerate one-character key: boundary matching must not splice
	// "[REDACTED]" into the middle of ordinary words containing that letter.
	t.Run("does not corrupt unrelated words for one character keys", func(t *testing.T) {
		providerErrorMessage := `Unauthorized: Incorrect API key provided: t. You can find your API key at https://platform.openai.com/account/api-keys.`

		sanitizedErr := SanitizeProviderError(errors.New(providerErrorMessage), "t")
		require.NotNil(t, sanitizedErr)
		assert.Equal(t, "Unauthorized: Incorrect API key provided. You can find your API key at https://platform.openai.com/account/api-keys.", sanitizedErr.Error())
		assert.NotContains(t, sanitizedErr.Error(), "Unau[REDACTED]horized")
		assert.NotContains(t, sanitizedErr.Error(), "Incorrect API key provided: t")
	})

	// Sanitization wraps rather than replaces: the original error must stay
	// reachable through errors.Is / errors.As / Unwrap.
	t.Run("preserves wrapped provider error chain", func(t *testing.T) {
		originalErr := errors.New("provider error: short")

		sanitizedErr := SanitizeProviderError(originalErr, "short")
		require.NotNil(t, sanitizedErr)
		assert.Equal(t, "provider error: [REDACTED]", sanitizedErr.Error())
		assert.ErrorIs(t, sanitizedErr, originalErr)

		var wrapped *SanitizedProviderError
		require.ErrorAs(t, sanitizedErr, &wrapped)
		assert.Equal(t, "provider error: [REDACTED]", wrapped.Error())
		assert.Equal(t, originalErr, wrapped.Unwrap())
	})
}

// TestSanitizeProviderError_bifrostStylePrefixes verifies that wrapping a
// provider message in a "bifrost error: ..." prefix does not defeat redaction:
// the prefix survives while the echoed key is removed.
func TestSanitizeProviderError_bifrostStylePrefixes(t *testing.T) {
	const configuredKey = "this-is-my-disclosed-api-key"
	upstream := fmt.Errorf(
		"bifrost error: %s",
		`Incorrect API key provided: this-is-my-disclosed-api-key. You can find your API key at https://platform.openai.com/account/api-keys.`,
	)

	sanitized := SanitizeProviderError(upstream, configuredKey)
	require.Error(t, sanitized)

	got := sanitized.Error()
	assert.Contains(t, got, "bifrost error:")
	assert.Contains(t, got, "Incorrect API key provided.")
	assert.NotContains(t, got, configuredKey)
}
Loading