Skip to content

Commit a9c2ecd

Browse files
authored
Reference Models Centrally, set OpenAI Model to gpt-5-nano and Updated Example (#3792)
Signed-off-by: Erin Lalor <[email protected]>
1 parent 0c9dd49 commit a9c2ecd

File tree

25 files changed

+201
-76
lines changed

25 files changed

+201
-76
lines changed

conversation/anthropic/anthropic.go

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -41,19 +41,15 @@ func NewAnthropic(logger logger.Logger) conversation.Conversation {
4141
return a
4242
}
4343

44-
const defaultModel = "claude-3-5-sonnet-20240620"
45-
4644
func (a *Anthropic) Init(ctx context.Context, meta conversation.Metadata) error {
4745
m := conversation.LangchainMetadata{}
4846
err := kmeta.DecodeMetadata(meta.Properties, &m)
4947
if err != nil {
5048
return err
5149
}
5250

53-
model := defaultModel
54-
if m.Model != "" {
55-
model = m.Model
56-
}
51+
// Resolve model via central helper (uses metadata, then env var, then default)
52+
model := conversation.GetAnthropicModel(m.Model)
5753

5854
llm, err := anthropic.New(
5955
anthropic.WithModel(model),

conversation/anthropic/metadata.yaml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,10 +24,10 @@ metadata:
2424
- name: model
2525
required: false
2626
description: |
27-
The Anthropic LLM to use.
27+
The Anthropic LLM to use. Configurable via ANTHROPIC_MODEL environment variable.
2828
type: string
29-
example: 'claude-3-5-sonnet-20240620'
30-
default: 'claude-3-5-sonnet-20240620'
29+
example: 'claude-sonnet-4-20250514'
30+
default: 'claude-sonnet-4-20250514'
3131
- name: cacheTTL
3232
required: false
3333
description: |

conversation/googleai/googleai.go

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -41,19 +41,15 @@ func NewGoogleAI(logger logger.Logger) conversation.Conversation {
4141
return g
4242
}
4343

44-
const defaultModel = "gemini-2.5-flash"
45-
4644
func (g *GoogleAI) Init(ctx context.Context, meta conversation.Metadata) error {
4745
md := conversation.LangchainMetadata{}
4846
err := kmeta.DecodeMetadata(meta.Properties, &md)
4947
if err != nil {
5048
return err
5149
}
5250

53-
model := defaultModel
54-
if md.Model != "" {
55-
model = md.Model
56-
}
51+
// Resolve model via central helper (uses metadata, then env var, then default)
52+
model := conversation.GetGoogleAIModel(md.Model)
5753

5854
opts := []openai.Option{
5955
openai.WithModel(model),

conversation/googleai/metadata.yaml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,10 +24,10 @@ metadata:
2424
- name: model
2525
required: false
2626
description: |
27-
The GoogleAI LLM to use.
27+
The GoogleAI LLM to use. Configurable via GOOGLEAI_MODEL environment variable.
2828
type: string
29-
example: 'gemini-2.5-flash'
30-
default: 'gemini-2.5-flash'
29+
example: 'gemini-2.5-flash-lite'
30+
default: 'gemini-2.5-flash-lite'
3131
- name: cacheTTL
3232
required: false
3333
description: |

conversation/huggingface/huggingface.go

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -42,9 +42,6 @@ func NewHuggingface(logger logger.Logger) conversation.Conversation {
4242
return h
4343
}
4444

45-
// Default model - using a popular and reliable model
46-
const defaultModel = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"
47-
4845
// Default HuggingFace OpenAI-compatible endpoint
4946
const defaultEndpoint = "https://router.huggingface.co/hf-inference/models/{{model}}/v1"
5047

@@ -55,10 +52,8 @@ func (h *Huggingface) Init(ctx context.Context, meta conversation.Metadata) erro
5552
return err
5653
}
5754

58-
model := defaultModel
59-
if m.Model != "" {
60-
model = m.Model
61-
}
55+
// Resolve model via central helper (uses metadata, then env var, then default)
56+
model := conversation.GetHuggingFaceModel(m.Model)
6257

6358
endpoint := strings.Replace(defaultEndpoint, "{{model}}", model, 1)
6459
if m.Endpoint != "" {

conversation/huggingface/metadata.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ metadata:
2424
- name: model
2525
required: false
2626
description: |
27-
The Huggingface model to use. Uses OpenAI-compatible API.
27+
The Huggingface model to use. Uses OpenAI-compatible API. Configurable via HUGGINGFACE_MODEL environment variable.
2828
type: string
2929
example: 'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B'
3030
default: 'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B'

conversation/metadata_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ func TestLangchainMetadata(t *testing.T) {
2525
t.Run("json marshaling with endpoint", func(t *testing.T) {
2626
metadata := LangchainMetadata{
2727
Key: "test-key",
28-
Model: "gpt-4",
28+
Model: DefaultOpenAIModel,
2929
CacheTTL: "10m",
3030
Endpoint: "https://custom-endpoint.example.com",
3131
}

conversation/mistral/metadata.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ metadata:
2424
- name: model
2525
required: false
2626
description: |
27-
The Mistral LLM to use.
27+
The Mistral LLM to use. Configurable via MISTRAL_MODEL environment variable.
2828
type: string
2929
example: 'open-mistral-7b'
3030
default: 'open-mistral-7b'

conversation/mistral/mistral.go

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -43,19 +43,15 @@ func NewMistral(logger logger.Logger) conversation.Conversation {
4343
return m
4444
}
4545

46-
const defaultModel = "open-mistral-7b"
47-
4846
func (m *Mistral) Init(ctx context.Context, meta conversation.Metadata) error {
4947
md := conversation.LangchainMetadata{}
5048
err := kmeta.DecodeMetadata(meta.Properties, &md)
5149
if err != nil {
5250
return err
5351
}
5452

55-
model := defaultModel
56-
if md.Model != "" {
57-
model = md.Model
58-
}
53+
// Resolve model via central helper (uses metadata, then env var, then default)
54+
model := conversation.GetMistralModel(md.Model)
5955

6056
llm, err := mistral.New(
6157
mistral.WithModel(model),

conversation/models.go

Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
/*
2+
Copyright 2025 The Dapr Authors
3+
Licensed under the Apache License, Version 2.0 (the "License");
4+
you may not use this file except in compliance with the License.
5+
You may obtain a copy of the License at
6+
7+
http://www.apache.org/licenses/LICENSE-2.0
8+
9+
Unless required by applicable law or agreed to in writing, software
10+
distributed under the License is distributed on an "AS IS" BASIS,
11+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
See the License for the specific language governing permissions and
13+
limitations under the License.
14+
*/
15+
package conversation
16+
17+
import (
18+
"os"
19+
)
20+
21+
// Default models for conversation components
22+
// These can be overridden via environment variables for runtime configuration
23+
const (
24+
// Environment variable names
25+
envOpenAIModel = "OPENAI_MODEL"
26+
envAzureOpenAIModel = "AZURE_OPENAI_MODEL"
27+
envAnthropicModel = "ANTHROPIC_MODEL"
28+
envGoogleAIModel = "GOOGLEAI_MODEL"
29+
envMistralModel = "MISTRAL_MODEL"
30+
envHuggingFaceModel = "HUGGINGFACE_MODEL"
31+
envOllamaModel = "OLLAMA_MODEL"
32+
)
33+
34+
// Exported default model constants for consumers of the conversation package.
// These are used as fallbacks when env vars and metadata are not set.
const (
	DefaultOpenAIModel      = "gpt-5-nano"   // Enable GPT-5 (Preview) for all clients
	DefaultAzureOpenAIModel = "gpt-4.1-nano" // Default Azure OpenAI model
	DefaultAnthropicModel   = "claude-sonnet-4-20250514"
	DefaultGoogleAIModel    = "gemini-2.5-flash-lite"
	DefaultMistralModel     = "open-mistral-7b"
	DefaultHuggingFaceModel = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"
	DefaultOllamaModel      = "llama3.2:latest"
)
45+
46+
// getModel resolves a model name with the following precedence:
//  1. metadataValue — the model explicitly configured in component metadata;
//  2. the environment variable named by envVar, if set and non-empty;
//  3. defaultValue — the package-level default for the component.
//
// Explicit component metadata wins over the ambient environment variable so a
// per-component configuration cannot be silently overridden by a process-wide
// setting. This matches the resolution order documented at every call site
// ("uses metadata, then env var, then default"); the previous implementation
// checked the environment variable first, contradicting that contract.
func getModel(envVar, defaultValue, metadataValue string) string {
	if metadataValue != "" {
		return metadataValue
	}
	if value := os.Getenv(envVar); value != "" {
		return value
	}
	return defaultValue
}
56+
57+
// GetOpenAIModel returns the OpenAI model to use, resolving between the
// metadata-configured value (metadataValue, pass "" if not set), the
// OPENAI_MODEL environment variable, and DefaultOpenAIModel via getModel.
func GetOpenAIModel(metadataValue string) string {
	return getModel(envOpenAIModel, DefaultOpenAIModel, metadataValue)
}
62+
63+
// GetAzureOpenAIModel returns the Azure OpenAI model to use, resolving
// between metadataValue ("" if not set), the AZURE_OPENAI_MODEL environment
// variable, and DefaultAzureOpenAIModel via getModel.
func GetAzureOpenAIModel(metadataValue string) string {
	return getModel(envAzureOpenAIModel, DefaultAzureOpenAIModel, metadataValue)
}
66+
67+
// GetAnthropicModel returns the Anthropic model to use, resolving between
// metadataValue ("" if not set), the ANTHROPIC_MODEL environment variable,
// and DefaultAnthropicModel via getModel.
func GetAnthropicModel(metadataValue string) string {
	return getModel(envAnthropicModel, DefaultAnthropicModel, metadataValue)
}
70+
71+
// GetGoogleAIModel returns the GoogleAI model to use, resolving between
// metadataValue ("" if not set), the GOOGLEAI_MODEL environment variable,
// and DefaultGoogleAIModel via getModel.
func GetGoogleAIModel(metadataValue string) string {
	return getModel(envGoogleAIModel, DefaultGoogleAIModel, metadataValue)
}
74+
75+
// GetMistralModel returns the Mistral model to use, resolving between
// metadataValue ("" if not set), the MISTRAL_MODEL environment variable,
// and DefaultMistralModel via getModel.
func GetMistralModel(metadataValue string) string {
	return getModel(envMistralModel, DefaultMistralModel, metadataValue)
}
78+
79+
// GetHuggingFaceModel returns the Huggingface model to use, resolving between
// metadataValue ("" if not set), the HUGGINGFACE_MODEL environment variable,
// and DefaultHuggingFaceModel via getModel.
func GetHuggingFaceModel(metadataValue string) string {
	return getModel(envHuggingFaceModel, DefaultHuggingFaceModel, metadataValue)
}
82+
83+
// GetOllamaModel returns the Ollama model to use, resolving between
// metadataValue ("" if not set), the OLLAMA_MODEL environment variable,
// and DefaultOllamaModel via getModel.
func GetOllamaModel(metadataValue string) string {
	return getModel(envOllamaModel, DefaultOllamaModel, metadataValue)
}

0 commit comments

Comments
 (0)