From 8740c90c38edd4165efebee0d821615f8f925cd4 Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Tue, 1 Jul 2025 21:49:02 +0000 Subject: [PATCH 01/13] updated model version consistently to "gpt-4.1-nano" in various files Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- conversation/metadata_test.go | 2 +- conversation/openai/metadata.yaml | 4 ++-- conversation/openai/openai.go | 2 +- conversation/openai/openai_test.go | 4 ++-- tests/config/conversation/README.md | 2 +- tests/config/conversation/openai/openai.yml | 2 +- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/conversation/metadata_test.go b/conversation/metadata_test.go index 58edab76e2..ce6c7fdc40 100644 --- a/conversation/metadata_test.go +++ b/conversation/metadata_test.go @@ -25,7 +25,7 @@ func TestLangchainMetadata(t *testing.T) { t.Run("json marshaling with endpoint", func(t *testing.T) { metadata := LangchainMetadata{ Key: "test-key", - Model: "gpt-4", + Model: "gpt-4.1-nano", CacheTTL: "10m", Endpoint: "https://custom-endpoint.example.com", } diff --git a/conversation/openai/metadata.yaml b/conversation/openai/metadata.yaml index 2adf807141..2df2fafabc 100644 --- a/conversation/openai/metadata.yaml +++ b/conversation/openai/metadata.yaml @@ -24,9 +24,9 @@ metadata: - name: model required: false description: | - The OpenAI LLM to use. Defaults to gpt-4o + The OpenAI LLM to use. Defaults to gpt-4.1-nano type: string - example: 'gpt-4-turbo' + example: 'gpt-4.1-nano' - name: endpoint required: false description: | diff --git a/conversation/openai/openai.go b/conversation/openai/openai.go index 6fc6b05298..1fe9391edd 100644 --- a/conversation/openai/openai.go +++ b/conversation/openai/openai.go @@ -41,7 +41,7 @@ func NewOpenAI(logger logger.Logger) conversation.Conversation { return o } -const defaultModel = "gpt-4o" +const defaultModel = "gpt-4.1-nano" func (o *OpenAI) Init(ctx context.Context, meta conversation.Metadata) error { md := conversation.LangchainMetadata{} diff --git a/conversation/openai/openai_test.go b/conversation/openai/openai_test.go index 16cc203b3b..1f0f09b631 100644 --- a/conversation/openai/openai_test.go +++ b/conversation/openai/openai_test.go @@ -34,7 +34,7 @@ func TestInit(t *testing.T) { name: "with default endpoint", metadata: map[string]string{ "key": "test-key", - "model": "gpt-4", + "model": "gpt-4.1-nano", }, testFn: func(t *testing.T, o *OpenAI, err error) { require.NoError(t, err) @@ -45,7 +45,7 @@ func TestInit(t *testing.T) { name: "with custom endpoint", metadata: map[string]string{ "key": "test-key", - "model": "gpt-4", + "model": "gpt-4.1-nano", "endpoint": "https://api.openai.com/v1", }, testFn: func(t *testing.T, o *OpenAI, err error) { diff --git a/tests/config/conversation/README.md b/tests/config/conversation/README.md index 1d30933d82..c553ab1a4b 100644 --- a/tests/config/conversation/README.md +++ b/tests/config/conversation/README.md @@ -95,7 +95,7 @@ Requires a local Ollama server running with the `llama3.2:latest` model availabl Each component has its own configuration file in this directory: - `echo/echo.yml` - Echo component configuration -- `openai/openai.yml` - OpenAI configuration with gpt-4o-mini model +- `openai/openai.yml` - OpenAI configuration with gpt-4.1-nano model - `anthropic/anthropic.yml` - Anthropic configuration with Claude 3 Haiku - `googleai/googleai.yml` - Google AI configuration with Gemini 1.5 Flash - `mistral/mistral.yml` - Mistral configuration with open-mistral-7b diff 
--git a/tests/config/conversation/openai/openai.yml b/tests/config/conversation/openai/openai.yml index 19eb55302f..d1e0541665 100644 --- a/tests/config/conversation/openai/openai.yml +++ b/tests/config/conversation/openai/openai.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{OPENAI_API_KEY}}" - name: model - value: "gpt-4o-mini" \ No newline at end of file + value: "gpt-4.1-nano" \ No newline at end of file From cff0983d6b8d6670ed5b7299abd011156f04a36a Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Fri, 29 Aug 2025 08:11:30 +0000 Subject: [PATCH 02/13] feat: centralize model management with environment variable configuration - Add conversation/models.go with centralized model constants using env vars - Update all conversation providers (OpenAI, Anthropic, Google AI, Mistral, HuggingFace, Ollama) to use centralized models - Replace hardcoded model values in config files with environment variables - Update metadata.yaml files to document environment variable usage - Enhance README.md with comprehensive environment variable documentation - Maintain backward compatibility with sensible fallback defaults This enables runtime model configuration without code changes, supporting different models per environment (dev/staging/prod) and infrastructure-as-code deployments. Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- conversation/anthropic/anthropic.go | 4 +- conversation/googleai/googleai.go | 4 +- conversation/huggingface/huggingface.go | 5 +- conversation/metadata_test.go | 2 +- conversation/mistral/mistral.go | 4 +- conversation/models.go | 70 +++++++++++++++++++ conversation/ollama/ollama.go | 4 +- conversation/openai/metadata.yaml | 4 +- conversation/openai/openai.go | 4 +- tests/config/conversation/README.md | 61 +++++++++++----- .../conversation/anthropic/anthropic.yml | 2 +- .../config/conversation/googleai/googleai.yml | 2 +- .../conversation/huggingface/huggingface.yml | 2 +- tests/config/conversation/mistral/mistral.yml | 2 +- tests/config/conversation/ollama/ollama.yml | 2 +- tests/config/conversation/openai/openai.yml | 2 +- 16 files changed, 130 insertions(+), 44 deletions(-) create mode 100644 conversation/models.go diff --git a/conversation/anthropic/anthropic.go b/conversation/anthropic/anthropic.go index fc7d4638e6..eaa21aa848 100644 --- a/conversation/anthropic/anthropic.go +++ b/conversation/anthropic/anthropic.go @@ -41,8 +41,6 @@ func NewAnthropic(logger logger.Logger) conversation.Conversation { return a } -const defaultModel = "claude-3-5-sonnet-20240620" - func (a *Anthropic) Init(ctx context.Context, meta conversation.Metadata) error { m := conversation.LangchainMetadata{} err := kmeta.DecodeMetadata(meta.Properties, &m) @@ -50,7 +48,7 @@ func (a *Anthropic) Init(ctx context.Context, meta conversation.Metadata) error return err } - model := defaultModel + model := conversation.DefaultAnthropicModel if m.Model != "" { model = m.Model } diff --git a/conversation/googleai/googleai.go b/conversation/googleai/googleai.go index 58aad5c368..06ad155734 100644 --- a/conversation/googleai/googleai.go +++ b/conversation/googleai/googleai.go @@ -41,8 +41,6 @@ func NewGoogleAI(logger logger.Logger) conversation.Conversation { return g } -const defaultModel = "gemini-1.5-flash" - func (g *GoogleAI) Init(ctx context.Context, meta conversation.Metadata) error { md := conversation.LangchainMetadata{} err := kmeta.DecodeMetadata(meta.Properties, &md) @@ -50,7 +48,7 @@ func (g *GoogleAI) Init(ctx context.Context, meta
conversation.Metadata) error { return err } - model := defaultModel + model := conversation.DefaultGoogleAIModel if md.Model != "" { model = md.Model } diff --git a/conversation/huggingface/huggingface.go b/conversation/huggingface/huggingface.go index 0ef727f874..f1b94bf526 100644 --- a/conversation/huggingface/huggingface.go +++ b/conversation/huggingface/huggingface.go @@ -42,9 +42,6 @@ func NewHuggingface(logger logger.Logger) conversation.Conversation { return h } -// Default model - using a popular and reliable model -const defaultModel = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" - // Default HuggingFace OpenAI-compatible endpoint const defaultEndpoint = "https://router.huggingface.co/hf-inference/models/{{model}}/v1" @@ -55,7 +52,7 @@ func (h *Huggingface) Init(ctx context.Context, meta conversation.Metadata) erro return err } - model := defaultModel + model := conversation.DefaultHuggingFaceModel if m.Model != "" { model = m.Model } diff --git a/conversation/metadata_test.go b/conversation/metadata_test.go index ce6c7fdc40..de83e82cb0 100644 --- a/conversation/metadata_test.go +++ b/conversation/metadata_test.go @@ -25,7 +25,7 @@ func TestLangchainMetadata(t *testing.T) { t.Run("json marshaling with endpoint", func(t *testing.T) { metadata := LangchainMetadata{ Key: "test-key", - Model: "gpt-4.1-nano", + Model: DefaultOpenAIModel, CacheTTL: "10m", Endpoint: "https://custom-endpoint.example.com", } diff --git a/conversation/mistral/mistral.go b/conversation/mistral/mistral.go index 8a0687163d..7cfb7973af 100644 --- a/conversation/mistral/mistral.go +++ b/conversation/mistral/mistral.go @@ -41,8 +41,6 @@ func NewMistral(logger logger.Logger) conversation.Conversation { return m } -const defaultModel = "open-mistral-7b" - func (m *Mistral) Init(ctx context.Context, meta conversation.Metadata) error { md := conversation.LangchainMetadata{} err := kmeta.DecodeMetadata(meta.Properties, &md) @@ -50,7 +48,7 @@ func (m *Mistral) Init(ctx context.Context, meta conversation.Metadata) error { return err } - model := defaultModel + model := conversation.DefaultMistralModel if md.Model != "" { model = md.Model } diff --git a/conversation/models.go b/conversation/models.go new file mode 100644 index 0000000000..a5f2fb54c6 --- /dev/null +++ b/conversation/models.go @@ -0,0 +1,70 @@ +/* +Copyright 2024 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +package conversation + +import ( + "os" +) + +// Default models for conversation components +// These can be overridden via environment variables for runtime configuration +const ( + // Environment variable names + envOpenAIModel = "DAPR_CONVERSATION_OPENAI_MODEL" + envAnthropicModel = "DAPR_CONVERSATION_ANTHROPIC_MODEL" + envGoogleAIModel = "DAPR_CONVERSATION_GOOGLEAI_MODEL" + envMistralModel = "DAPR_CONVERSATION_MISTRAL_MODEL" + envHuggingFaceModel = "DAPR_CONVERSATION_HUGGINGFACE_MODEL" + envOllamaModel = "DAPR_CONVERSATION_OLLAMA_MODEL" +) + +// Default model values (used as fallbacks when env vars are not set) +const ( + defaultOpenAIModel = "gpt-5-nano" + defaultAnthropicModel = "claude-3-5-sonnet-20240620" + defaultGoogleAIModel = "gemini-1.5-flash" + defaultMistralModel = "open-mistral-7b" + defaultHuggingFaceModel = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" + defaultOllamaModel = "llama3.2:latest" +) + +// getEnvOrDefault returns the value of an environment variable or a default value +func getEnvOrDefault(envVar, defaultValue string) string { + if value := os.Getenv(envVar); value != "" { + return value + } + return defaultValue +} + +// Default model getters that check environment variables first +var ( + // DefaultOpenAIModel returns the OpenAI model, checking env var first + DefaultOpenAIModel = getEnvOrDefault(envOpenAIModel, defaultOpenAIModel) + + // DefaultAnthropicModel returns the Anthropic model, checking env var first + DefaultAnthropicModel = getEnvOrDefault(envAnthropicModel, defaultAnthropicModel) + + // DefaultGoogleAIModel returns the Google AI model, checking env var first + DefaultGoogleAIModel = getEnvOrDefault(envGoogleAIModel, defaultGoogleAIModel) + + // DefaultMistralModel returns the Mistral model, checking env var first + DefaultMistralModel = getEnvOrDefault(envMistralModel, defaultMistralModel) + + // DefaultHuggingFaceModel returns the HuggingFace model, checking env var first + DefaultHuggingFaceModel = getEnvOrDefault(envHuggingFaceModel, defaultHuggingFaceModel) + + // DefaultOllamaModel returns the Ollama model, checking env var first + DefaultOllamaModel = getEnvOrDefault(envOllamaModel, defaultOllamaModel) +) diff --git a/conversation/ollama/ollama.go b/conversation/ollama/ollama.go index d3f7aa0913..fd8d347386 100644 --- a/conversation/ollama/ollama.go +++ b/conversation/ollama/ollama.go @@ -41,8 +41,6 @@ func NewOllama(logger logger.Logger) conversation.Conversation { return o } -const defaultModel = "llama3.2:latest" - func (o *Ollama) Init(ctx context.Context, meta conversation.Metadata) error { md := conversation.LangchainMetadata{} err := kmeta.DecodeMetadata(meta.Properties, &md) @@ -50,7 +48,7 @@ func (o *Ollama) Init(ctx context.Context, meta conversation.Metadata) error { return err } - model := defaultModel + model := conversation.DefaultOllamaModel if md.Model != "" { model = md.Model } diff --git a/conversation/openai/metadata.yaml b/conversation/openai/metadata.yaml index 2df2fafabc..1675088559 100644 --- a/conversation/openai/metadata.yaml +++ b/conversation/openai/metadata.yaml @@ -24,9 +24,9 @@ metadata: - name: model required: false description: | - The OpenAI LLM to use. Defaults to gpt-4.1-nano + The OpenAI LLM to use. 
Defaults to gpt-5-nano (configurable via DAPR_CONVERSATION_OPENAI_MODEL environment variable) type: string - example: 'gpt-4.1-nano' + example: '${{DAPR_CONVERSATION_OPENAI_MODEL}}' - name: endpoint required: false description: | diff --git a/conversation/openai/openai.go b/conversation/openai/openai.go index 1fe9391edd..9711f1c7ce 100644 --- a/conversation/openai/openai.go +++ b/conversation/openai/openai.go @@ -41,8 +41,6 @@ func NewOpenAI(logger logger.Logger) conversation.Conversation { return o } -const defaultModel = "gpt-4.1-nano" - func (o *OpenAI) Init(ctx context.Context, meta conversation.Metadata) error { md := conversation.LangchainMetadata{} err := kmeta.DecodeMetadata(meta.Properties, &md) @@ -50,7 +48,7 @@ func (o *OpenAI) Init(ctx context.Context, meta conversation.Metadata) error { return err } - model := defaultModel + model := conversation.DefaultOpenAIModel if md.Model != "" { model = md.Model } diff --git a/tests/config/conversation/README.md b/tests/config/conversation/README.md index c553ab1a4b..be78e00385 100644 --- a/tests/config/conversation/README.md +++ b/tests/config/conversation/README.md @@ -46,48 +46,75 @@ cp env.template .env Alternatively, you can set the following environment variables to run the respective tests: -### OpenAI +#### Model Configuration (Optional) + +You can override the default models used by each component by setting these environment variables: + +```bash +export DAPR_CONVERSATION_OPENAI_MODEL="gpt-5-nano" # Default: gpt-5-nano +export DAPR_CONVERSATION_ANTHROPIC_MODEL="claude-3-5-sonnet-20240620" # Default: claude-3-5-sonnet-20240620 +export DAPR_CONVERSATION_GOOGLEAI_MODEL="gemini-1.5-flash" # Default: gemini-1.5-flash +export DAPR_CONVERSATION_MISTRAL_MODEL="open-mistral-7b" # Default: open-mistral-7b +export DAPR_CONVERSATION_HUGGINGFACE_MODEL="deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" # Default: deepseek-ai/DeepSeek-R1-Distill-Qwen-32B +export DAPR_CONVERSATION_OLLAMA_MODEL="llama3.2:latest" # Default: llama3.2:latest +``` + +#### OpenAI + ```bash export OPENAI_API_KEY="your_openai_api_key" ``` -Get your API key from: https://platform.openai.com/api-keys -### Anthropic +Get your API key from: <https://platform.openai.com/api-keys> + +#### Anthropic + ```bash export ANTHROPIC_API_KEY="your_anthropic_api_key" ``` -Get your API key from: https://console.anthropic.com/ -### Google AI +Get your API key from: <https://console.anthropic.com/> + +#### Google AI + ```bash export GOOGLE_AI_API_KEY="your_google_ai_api_key" ``` -Get your API key from: https://aistudio.google.com/app/apikey -### Mistral +Get your API key from: <https://aistudio.google.com/app/apikey> + +#### Mistral + ```bash export MISTRAL_API_KEY="your_mistral_api_key" ``` -Get your API key from: https://console.mistral.ai/ -### HuggingFace +Get your API key from: <https://console.mistral.ai/> + +#### HuggingFace + ```bash export HUGGINGFACE_API_KEY="your_huggingface_api_key" ``` -Get your API key from: https://huggingface.co/settings/tokens -### AWS Bedrock +Get your API key from: <https://huggingface.co/settings/tokens> + +#### AWS Bedrock + ```bash export AWS_ACCESS_KEY_ID="your_aws_access_key" export AWS_SECRET_ACCESS_KEY="your_aws_secret_key" export AWS_REGION="us-east-1" # Optional, defaults to us-east-1 ``` + Get your credentials from AWS Console -### Ollama +#### Ollama + ```bash export OLLAMA_ENABLED="1" ``` + Requires a local Ollama server running with the `llama3.2:latest` model available.
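The override variables documented above are read once, at package init, by the `getEnvOrDefault` helper this patch adds in `conversation/models.go`. A minimal runnable sketch of that resolution (the model names printed here are only illustrative):

```go
package main

import (
	"fmt"
	"os"
)

// getEnvOrDefault mirrors the helper added in conversation/models.go:
// a non-empty environment variable wins, otherwise the fallback is used.
func getEnvOrDefault(envVar, defaultValue string) string {
	if value := os.Getenv(envVar); value != "" {
		return value
	}
	return defaultValue
}

func main() {
	// Variable unset: the baked-in default is returned.
	fmt.Println(getEnvOrDefault("DAPR_CONVERSATION_OPENAI_MODEL", "gpt-5-nano")) // gpt-5-nano

	// Variable set: it overrides the default.
	os.Setenv("DAPR_CONVERSATION_OPENAI_MODEL", "gpt-4.1-nano")
	fmt.Println(getEnvOrDefault("DAPR_CONVERSATION_OPENAI_MODEL", "gpt-5-nano")) // gpt-4.1-nano
}
```

Because the exported `Default*Model` values are computed in a package-level `var` block, these variables must be set before the process starts; exporting them mid-run has no effect on already-initialized components.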
## Test Configuration @@ -95,7 +122,7 @@ Requires a local Ollama server running with the `llama3.2:latest` model availabl Each component has its own configuration file in this directory: - `echo/echo.yml` - Echo component configuration -- `openai/openai.yml` - OpenAI configuration with gpt-4.1-nano model +- `openai/openai.yml` - OpenAI configuration with gpt-5-nano model - `anthropic/anthropic.yml` - Anthropic configuration with Claude 3 Haiku - `googleai/googleai.yml` - Google AI configuration with Gemini 1.5 Flash - `mistral/mistral.yml` - Mistral configuration with open-mistral-7b @@ -109,13 +136,15 @@ The configurations use cost-effective models where possible to minimize testing The HuggingFace component uses a workaround due to issues with the native HuggingFace implementation in langchaingo. Instead of using the HuggingFace SDK directly, it uses the OpenAI SDK with HuggingFace's OpenAI-compatible API endpoints. -### How it works: +### How it works + - **Model Selection**: Any HuggingFace model can be used by specifying its full name (e.g., `deepseek-ai/DeepSeek-R1-Distill-Qwen-32B`) - **Dynamic Endpoints**: The endpoint URL is automatically generated based on the model name using the template: `https://router.huggingface.co/hf-inference/models/{{model}}/v1` - **Custom Endpoints**: You can override the endpoint by specifying a custom `endpoint` parameter - **Authentication**: Uses the same HuggingFace API key authentication -### Example Configuration: +### Example Configuration + ```yaml apiVersion: dapr.io/v1alpha1 kind: Component @@ -142,4 +171,4 @@ This approach provides better reliability and compatibility while maintaining ac - Cost-effective models are used by default to minimize API costs - HuggingFace uses the OpenAI compatibility layer as a workaround due to langchaingo API issues - Ollama requires a local server and must be explicitly enabled -- All tests include proper initialization and basic conversation functionality testing \ No newline at end of file +- All tests include proper initialization and basic conversation functionality testing diff --git a/tests/config/conversation/anthropic/anthropic.yml b/tests/config/conversation/anthropic/anthropic.yml index 22353e19cc..207b1154d8 100644 --- a/tests/config/conversation/anthropic/anthropic.yml +++ b/tests/config/conversation/anthropic/anthropic.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{ANTHROPIC_API_KEY}}" - name: model - value: "claude-3-haiku-20240307" \ No newline at end of file + value: "${{DAPR_CONVERSATION_ANTHROPIC_MODEL}}" \ No newline at end of file diff --git a/tests/config/conversation/googleai/googleai.yml b/tests/config/conversation/googleai/googleai.yml index d2ad6ee25f..f6f1bd191c 100644 --- a/tests/config/conversation/googleai/googleai.yml +++ b/tests/config/conversation/googleai/googleai.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{GOOGLE_AI_API_KEY}}" - name: model - value: "gemini-1.5-flash" \ No newline at end of file + value: "${{DAPR_CONVERSATION_GOOGLEAI_MODEL}}" \ No newline at end of file diff --git a/tests/config/conversation/huggingface/huggingface.yml b/tests/config/conversation/huggingface/huggingface.yml index 4af48ad7c8..06e964835f 100644 --- a/tests/config/conversation/huggingface/huggingface.yml +++ b/tests/config/conversation/huggingface/huggingface.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{HUGGINGFACE_API_KEY}}" - name: model - value: "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" \ No newline at end of file + value: "${{DAPR_CONVERSATION_HUGGINGFACE_MODEL}}" \ No newline 
at end of file diff --git a/tests/config/conversation/mistral/mistral.yml b/tests/config/conversation/mistral/mistral.yml index 016a8b5317..fd219b0a26 100644 --- a/tests/config/conversation/mistral/mistral.yml +++ b/tests/config/conversation/mistral/mistral.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{MISTRAL_API_KEY}}" - name: model - value: "open-mistral-7b" \ No newline at end of file + value: "${{DAPR_CONVERSATION_MISTRAL_MODEL}}" \ No newline at end of file diff --git a/tests/config/conversation/ollama/ollama.yml b/tests/config/conversation/ollama/ollama.yml index c144669c53..176ecb2478 100644 --- a/tests/config/conversation/ollama/ollama.yml +++ b/tests/config/conversation/ollama/ollama.yml @@ -7,4 +7,4 @@ spec: version: v1 metadata: - name: model - value: "llama3.2:latest" \ No newline at end of file + value: "${{DAPR_CONVERSATION_OLLAMA_MODEL}}" \ No newline at end of file diff --git a/tests/config/conversation/openai/openai.yml b/tests/config/conversation/openai/openai.yml index d1e0541665..ee3e3de880 100644 --- a/tests/config/conversation/openai/openai.yml +++ b/tests/config/conversation/openai/openai.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{OPENAI_API_KEY}}" - name: model - value: "gpt-4.1-nano" \ No newline at end of file + value: "${{DAPR_CONVERSATION_OPENAI_MODEL}}" \ No newline at end of file From 6931a299f683333b8ac06c7d4e032547097f4b38 Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Fri, 29 Aug 2025 08:18:09 +0000 Subject: [PATCH 03/13] feat: centralize model management with environment variable configuration - Add conversation/models.go with centralized model constants using env vars - Update all conversation providers (OpenAI, Anthropic, Google AI, Mistral, HuggingFace, Ollama) to use centralized models - Replace hardcoded model values in config files with environment variables - Update metadata.yaml files to document environment variable usage - Enhance README.md with comprehensive environment variable documentation - Maintain backward compatibility with sensible fallback defaults This enables runtime model configuration without code changes, supporting different models per environment (dev/staging/prod) and infrastructure-as-code deployments. 
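Concretely, every provider's `Init` now resolves its model the same way before constructing the langchaingo client. A condensed sketch of the shared pattern (import paths assumed from the repository layout, and `md` is the decoded `LangchainMetadata`):

```go
package example

import (
	"github.com/dapr/components-contrib/conversation"
	"github.com/tmc/langchaingo/llms/openai"
)

// newLLM shows the per-provider resolution: component metadata wins;
// otherwise the centralized default from conversation/models.go applies,
// which itself falls back from the env var to the baked-in constant.
func newLLM(md conversation.LangchainMetadata) (*openai.LLM, error) {
	model := conversation.DefaultOpenAIModel
	if md.Model != "" {
		model = md.Model
	}
	return openai.New(openai.WithModel(model), openai.WithToken(md.Key))
}
```

The same two resolution lines appear in the Anthropic, Google AI, Mistral, HuggingFace, and Ollama providers, each pointing at its own `Default*Model`.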
Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- conversation/models.go | 20 ++++++++++---------- tests/config/conversation/openai/openai.yml | 12 ++++++++++++ 2 files changed, 22 insertions(+), 10 deletions(-) create mode 100644 tests/config/conversation/openai/openai.yml diff --git a/conversation/models.go b/conversation/models.go index a5f2fb54c6..0b0453fff1 100644 --- a/conversation/models.go +++ b/conversation/models.go @@ -22,22 +22,22 @@ import ( // These can be overridden via environment variables for runtime configuration const ( // Environment variable names - envOpenAIModel = "DAPR_CONVERSATION_OPENAI_MODEL" - envAnthropicModel = "DAPR_CONVERSATION_ANTHROPIC_MODEL" - envGoogleAIModel = "DAPR_CONVERSATION_GOOGLEAI_MODEL" - envMistralModel = "DAPR_CONVERSATION_MISTRAL_MODEL" + envOpenAIModel = "DAPR_CONVERSATION_OPENAI_MODEL" + envAnthropicModel = "DAPR_CONVERSATION_ANTHROPIC_MODEL" + envGoogleAIModel = "DAPR_CONVERSATION_GOOGLEAI_MODEL" + envMistralModel = "DAPR_CONVERSATION_MISTRAL_MODEL" envHuggingFaceModel = "DAPR_CONVERSATION_HUGGINGFACE_MODEL" - envOllamaModel = "DAPR_CONVERSATION_OLLAMA_MODEL" + envOllamaModel = "DAPR_CONVERSATION_OLLAMA_MODEL" ) // Default model values (used as fallbacks when env vars are not set) const ( - defaultOpenAIModel = "gpt-5-nano" - defaultAnthropicModel = "claude-3-5-sonnet-20240620" - defaultGoogleAIModel = "gemini-1.5-flash" - defaultMistralModel = "open-mistral-7b" + defaultOpenAIModel = "gpt-5-nano" + defaultAnthropicModel = "claude-3-5-sonnet-20240620" + defaultGoogleAIModel = "gemini-1.5-flash" + defaultMistralModel = "open-mistral-7b" defaultHuggingFaceModel = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" - defaultOllamaModel = "llama3.2:latest" + defaultOllamaModel = "llama3.2:latest" ) // getEnvOrDefault returns the value of an environment variable or a default value diff --git a/tests/config/conversation/openai/openai.yml b/tests/config/conversation/openai/openai.yml new file mode 100644 index 0000000000..ee3e3de880 --- /dev/null +++ b/tests/config/conversation/openai/openai.yml @@ -0,0 +1,12 @@ +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: openai +spec: + type: conversation.openai + version: v1 + metadata: + - name: key + value: "${{OPENAI_API_KEY}}" + - name: model + value: "${{DAPR_CONVERSATION_OPENAI_MODEL}}" \ No newline at end of file From e7a68a8ed65e89a5f669ace3dcddaf02c0fcc2ba Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Fri, 29 Aug 2025 09:10:40 +0000 Subject: [PATCH 04/13] feat: update model examples to use environment variable placeholders Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- conversation/googleai/metadata.yaml | 3 +-- conversation/huggingface/metadata.yaml | 3 +-- conversation/mistral/metadata.yaml | 2 +- conversation/ollama/metadata.yaml | 3 +-- conversation/openai/openai_test.go | 10 +++++----- 5 files changed, 9 insertions(+), 12 deletions(-) diff --git a/conversation/googleai/metadata.yaml b/conversation/googleai/metadata.yaml index 848b864c73..41ea72ccc0 100644 --- a/conversation/googleai/metadata.yaml +++ b/conversation/googleai/metadata.yaml @@ -26,8 +26,7 @@ metadata: description: | The GoogleAI LLM to use. 
type: string - example: 'gemini-2.0-flash' - default: 'gemini-2.0-flash' + example: '${{DAPR_CONVERSATION_GOOGLEAI_MODEL}}' - name: cacheTTL required: false description: | diff --git a/conversation/huggingface/metadata.yaml b/conversation/huggingface/metadata.yaml index 89ec7d01a5..1f17ea4d51 100644 --- a/conversation/huggingface/metadata.yaml +++ b/conversation/huggingface/metadata.yaml @@ -26,8 +26,7 @@ metadata: description: | The Huggingface model to use. Uses OpenAI-compatible API. type: string - example: 'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B' - default: 'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B' + example: '${{DAPR_CONVERSATION_HUGGINGFACE_MODEL}}' - name: endpoint required: false description: | diff --git a/conversation/mistral/metadata.yaml b/conversation/mistral/metadata.yaml index 329379dc24..3e64ba0944 100644 --- a/conversation/mistral/metadata.yaml +++ b/conversation/mistral/metadata.yaml @@ -26,7 +26,7 @@ metadata: description: | The Mistral LLM to use. type: string - example: 'open-mistral-7b' + example: '${{DAPR_CONVERSATION_MISTRAL_MODEL}}' default: 'open-mistral-7b' - name: cacheTTL required: false diff --git a/conversation/ollama/metadata.yaml b/conversation/ollama/metadata.yaml index 113c7fbcd6..59690bb4ca 100644 --- a/conversation/ollama/metadata.yaml +++ b/conversation/ollama/metadata.yaml @@ -14,8 +14,7 @@ metadata: description: | The Ollama LLM to use. type: string - example: 'llama3.2:latest' - default: 'llama3.2:latest' + example: '${{DAPR_CONVERSATION_OLLAMA_MODEL}}' - name: cacheTTL required: false description: | diff --git a/conversation/openai/openai_test.go b/conversation/openai/openai_test.go index 3103838244..0c645b2032 100644 --- a/conversation/openai/openai_test.go +++ b/conversation/openai/openai_test.go @@ -34,7 +34,7 @@ func TestInit(t *testing.T) { name: "with default endpoint", metadata: map[string]string{ "key": "test-key", - "model": "gpt-4.1-nano", + "model": conversation.DefaultOpenAIModel, }, testFn: func(t *testing.T, o *OpenAI, err error) { require.NoError(t, err) @@ -45,7 +45,7 @@ func TestInit(t *testing.T) { name: "with custom endpoint", metadata: map[string]string{ "key": "test-key", - "model": "gpt-4.1-nano", + "model": conversation.DefaultOpenAIModel, "endpoint": "https://api.openai.com/v1", }, testFn: func(t *testing.T, o *OpenAI, err error) { @@ -59,7 +59,7 @@ func TestInit(t *testing.T) { name: "with apiType azure and missing apiVersion", metadata: map[string]string{ "key": "test-key", - "model": "gpt-4", + "model": conversation.DefaultOpenAIModel, "apiType": "azure", "endpoint": "https://custom-endpoint.openai.azure.com/", }, @@ -72,7 +72,7 @@ func TestInit(t *testing.T) { name: "with apiType azure and custom apiVersion", metadata: map[string]string{ "key": "test-key", - "model": "gpt-4", + "model": conversation.DefaultOpenAIModel, "apiType": "azure", "endpoint": "https://custom-endpoint.openai.azure.com/", "apiVersion": "2025-01-01-preview", @@ -86,7 +86,7 @@ func TestInit(t *testing.T) { name: "with apiType azure but missing endpoint", metadata: map[string]string{ "key": "test-key", - "model": "gpt-4", + "model": conversation.DefaultOpenAIModel, "apiType": "azure", "apiVersion": "2025-01-01-preview", }, From 2d151c14c13ab08f5109e71428f04be95e23741b Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Wed, 3 Sep 2025 17:48:18 +0000 Subject: [PATCH 05/13] feat: enhance model retrieval with metadata support in getters. 
Updated anthropic and google model to latest defaults Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- conversation/models.go | 46 +++++++++++++++++++++++++----------------- 1 file changed, 27 insertions(+), 19 deletions(-) diff --git a/conversation/models.go b/conversation/models.go index 0b0453fff1..20b2333bd1 100644 --- a/conversation/models.go +++ b/conversation/models.go @@ -32,39 +32,47 @@ const ( // Default model values (used as fallbacks when env vars are not set) const ( - defaultOpenAIModel = "gpt-5-nano" - defaultAnthropicModel = "claude-3-5-sonnet-20240620" - defaultGoogleAIModel = "gemini-1.5-flash" + defaultOpenAIModel = "gpt-5-nano" // Enable GPT-5 (Preview) for all clients + defaultAnthropicModel = "claude-sonnet-4-20250514" + defaultGoogleAIModel = "gemini-2.5-flash-lite" defaultMistralModel = "open-mistral-7b" defaultHuggingFaceModel = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" defaultOllamaModel = "llama3.2:latest" ) // getEnvOrDefault returns the value of an environment variable or a default value -func getEnvOrDefault(envVar, defaultValue string) string { +func getModelValue(envVar, defaultValue, metadataValue string) string { + if metadataValue != "" { + return metadataValue + } if value := os.Getenv(envVar); value != "" { return value } return defaultValue } -// Default model getters that check environment variables first -var ( - // DefaultOpenAIModel returns the OpenAI model, checking env var first - DefaultOpenAIModel = getEnvOrDefault(envOpenAIModel, defaultOpenAIModel) +// Example usage for model getters with metadata support: +// Pass metadataValue from your metadata file/struct, or "" if not set. +func GetOpenAIModel(metadataValue string) string { + return getModelValue(envOpenAIModel, defaultOpenAIModel, metadataValue) +} - // DefaultAnthropicModel returns the Anthropic model, checking env var first - DefaultAnthropicModel = getEnvOrDefault(envAnthropicModel, defaultAnthropicModel) +func GetAnthropicModel(metadataValue string) string { + return getModelValue(envAnthropicModel, defaultAnthropicModel, metadataValue) +} - // DefaultGoogleAIModel returns the Google AI model, checking env var first - DefaultGoogleAIModel = getEnvOrDefault(envGoogleAIModel, defaultGoogleAIModel) +func GetGoogleAIModel(metadataValue string) string { + return getModelValue(envGoogleAIModel, defaultGoogleAIModel, metadataValue) +} - // DefaultMistralModel returns the Mistral model, checking env var first - DefaultMistralModel = getEnvOrDefault(envMistralModel, defaultMistralModel) +func GetMistralModel(metadataValue string) string { + return getModelValue(envMistralModel, defaultMistralModel, metadataValue) +} - // DefaultHuggingFaceModel returns the HuggingFace model, checking env var first - DefaultHuggingFaceModel = getEnvOrDefault(envHuggingFaceModel, defaultHuggingFaceModel) +func GetHuggingFaceModel(metadataValue string) string { + return getModelValue(envHuggingFaceModel, defaultHuggingFaceModel, metadataValue) +} - // DefaultOllamaModel returns the Ollama model, checking env var first - DefaultOllamaModel = getEnvOrDefault(envOllamaModel, defaultOllamaModel) -) +func GetOllamaModel(metadataValue string) string { + return getModelValue(envOllamaModel, defaultOllamaModel, metadataValue) +} From 2c99617fbc168b7874c3c6d9a0be9773f1ab87c2 Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Thu, 4 Sep 2025 06:35:29 +0000 Subject: [PATCH 06/13] feat: centralize conversation model management with fallback hierarchy - 
Add centralized model management in conversation/models.go with default constants - Implement Get*Model() functions supporting metadata > env var > default fallback - Update all conversation providers (openai, anthropic, googleai, mistral, huggingface, ollama) to use centralized model getters - Add Azure OpenAI model support with AZURE_OPENAI_MODEL env var - Update test configurations to use optional environment variables with empty string fallbacks - Enhance test framework to support ${{ENV_VAR||}} syntax for optional env vars - Remove hardcoded model defaults from test configs, allowing Go code defaults to be used This enables flexible model configuration: specified in metadata, environment variables, or fallback to sensible defaults. Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- conversation/anthropic/anthropic.go | 6 ++-- conversation/googleai/googleai.go | 6 ++-- conversation/huggingface/huggingface.go | 6 ++-- conversation/mistral/mistral.go | 6 ++-- conversation/models.go | 33 +++++++++++-------- conversation/ollama/ollama.go | 6 ++-- conversation/openai/openai.go | 9 +++-- .../conversation/anthropic/anthropic.yml | 2 +- .../config/conversation/googleai/googleai.yml | 2 +- .../conversation/huggingface/huggingface.yml | 2 +- tests/config/conversation/mistral/mistral.yml | 2 +- tests/config/conversation/ollama/ollama.yml | 2 +- .../conversation/openai/azure/openai.yml | 2 +- tests/config/conversation/openai/openai.yml | 12 ------------ .../conversation/openai/openai/openai.yml | 2 +- tests/conformance/common.go | 15 +++++++++ 16 files changed, 58 insertions(+), 55 deletions(-) delete mode 100644 tests/config/conversation/openai/openai.yml diff --git a/conversation/anthropic/anthropic.go b/conversation/anthropic/anthropic.go index eaa21aa848..19771d1a06 100644 --- a/conversation/anthropic/anthropic.go +++ b/conversation/anthropic/anthropic.go @@ -48,10 +48,8 @@ func (a *Anthropic) Init(ctx context.Context, meta conversation.Metadata) error return err } - model := conversation.DefaultAnthropicModel - if m.Model != "" { - model = m.Model - } + // Resolve model via central helper (uses metadata, then env var, then default) + model := conversation.GetAnthropicModel(m.Model) llm, err := anthropic.New( anthropic.WithModel(model), diff --git a/conversation/googleai/googleai.go b/conversation/googleai/googleai.go index 06ad155734..7f1699381a 100644 --- a/conversation/googleai/googleai.go +++ b/conversation/googleai/googleai.go @@ -48,10 +48,8 @@ func (g *GoogleAI) Init(ctx context.Context, meta conversation.Metadata) error { return err } - model := conversation.DefaultGoogleAIModel - if md.Model != "" { - model = md.Model - } + // Resolve model via central helper (uses metadata, then env var, then default) + model := conversation.GetGoogleAIModel(md.Model) opts := []googleai.Option{ googleai.WithAPIKey(md.Key), diff --git a/conversation/huggingface/huggingface.go b/conversation/huggingface/huggingface.go index f1b94bf526..d6d91ca5af 100644 --- a/conversation/huggingface/huggingface.go +++ b/conversation/huggingface/huggingface.go @@ -52,10 +52,8 @@ func (h *Huggingface) Init(ctx context.Context, meta conversation.Metadata) erro return err } - model := conversation.DefaultHuggingFaceModel - if m.Model != "" { - model = m.Model - } + // Resolve model via central helper (uses metadata, then env var, then default) + model := conversation.GetHuggingFaceModel(m.Model) endpoint := strings.Replace(defaultEndpoint, "{{model}}", model, 1) if m.Endpoint != "" { diff --git
a/conversation/mistral/mistral.go b/conversation/mistral/mistral.go index 6e1628d086..d22b9ef9a4 100644 --- a/conversation/mistral/mistral.go +++ b/conversation/mistral/mistral.go @@ -50,10 +50,8 @@ func (m *Mistral) Init(ctx context.Context, meta conversation.Metadata) error { return err } - model := conversation.DefaultMistralModel - if md.Model != "" { - model = md.Model - } + // Resolve model via central helper (uses metadata, then env var, then default) + model := conversation.GetMistralModel(md.Model) llm, err := mistral.New( mistral.WithModel(model), diff --git a/conversation/models.go b/conversation/models.go index 20b2333bd1..0f5bfd7c61 100644 --- a/conversation/models.go +++ b/conversation/models.go @@ -23,6 +23,7 @@ import ( const ( // Environment variable names envOpenAIModel = "DAPR_CONVERSATION_OPENAI_MODEL" + envAzureOpenAIModel = "AZURE_OPENAI_MODEL" envAnthropicModel = "DAPR_CONVERSATION_ANTHROPIC_MODEL" envGoogleAIModel = "DAPR_CONVERSATION_GOOGLEAI_MODEL" envMistralModel = "DAPR_CONVERSATION_MISTRAL_MODEL" @@ -30,14 +31,16 @@ const ( envOllamaModel = "DAPR_CONVERSATION_OLLAMA_MODEL" ) -// Default model values (used as fallbacks when env vars are not set) +// Exported default model constants for consumers of the conversation package. +// These are used as fallbacks when env vars and metadata are not set. const ( - defaultOpenAIModel = "gpt-5-nano" // Enable GPT-5 (Preview) for all clients - defaultAnthropicModel = "claude-sonnet-4-20250514" - defaultGoogleAIModel = "gemini-2.5-flash-lite" - defaultMistralModel = "open-mistral-7b" - defaultHuggingFaceModel = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" - defaultOllamaModel = "llama3.2:latest" + DefaultOpenAIModel = "gpt-5-nano" // Enable GPT-5 (Preview) for all clients + DefaultAzureOpenAIModel = "gpt-4.1-nano" // Default Azure OpenAI model + DefaultAnthropicModel = "claude-sonnet-4-20250514" + DefaultGoogleAIModel = "gemini-2.5-flash-lite" + DefaultMistralModel = "open-mistral-7b" + DefaultHuggingFaceModel = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" + DefaultOllamaModel = "llama3.2:latest" ) // getEnvOrDefault returns the value of an environment variable or a default value @@ -54,25 +57,29 @@ func getModelValue(envVar, defaultValue, metadataValue string) string { // Example usage for model getters with metadata support: // Pass metadataValue from your metadata file/struct, or "" if not set. 
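The precedence these getters implement is metadata value first, then environment variable, then the package default. A small self-contained sketch of the observable behavior (package name and model values are illustrative):

```go
package example

import (
	"fmt"
	"os"

	"github.com/dapr/components-contrib/conversation"
)

// modelPrecedence demonstrates the three-level fallback of the getters.
func modelPrecedence() {
	os.Setenv("DAPR_CONVERSATION_OPENAI_MODEL", "gpt-4o-mini")
	fmt.Println(conversation.GetOpenAIModel("gpt-4.1")) // "gpt-4.1": metadata wins
	fmt.Println(conversation.GetOpenAIModel(""))        // "gpt-4o-mini": env var is next
	os.Unsetenv("DAPR_CONVERSATION_OPENAI_MODEL")
	fmt.Println(conversation.GetOpenAIModel(""))        // "gpt-5-nano": baked-in default
}
```

Unlike the package-level `Default*Model` variables from the earlier patch, the getters read the environment at call time, so the override can change between calls.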
func GetOpenAIModel(metadataValue string) string { - return getModelValue(envOpenAIModel, defaultOpenAIModel, metadataValue) + return getModelValue(envOpenAIModel, DefaultOpenAIModel, metadataValue) +} + +func GetAzureOpenAIModel(metadataValue string) string { + return getModelValue(envAzureOpenAIModel, DefaultAzureOpenAIModel, metadataValue) } func GetAnthropicModel(metadataValue string) string { - return getModelValue(envAnthropicModel, defaultAnthropicModel, metadataValue) + return getModelValue(envAnthropicModel, DefaultAnthropicModel, metadataValue) } func GetGoogleAIModel(metadataValue string) string { - return getModelValue(envGoogleAIModel, defaultGoogleAIModel, metadataValue) + return getModelValue(envGoogleAIModel, DefaultGoogleAIModel, metadataValue) } func GetMistralModel(metadataValue string) string { - return getModelValue(envMistralModel, defaultMistralModel, metadataValue) + return getModelValue(envMistralModel, DefaultMistralModel, metadataValue) } func GetHuggingFaceModel(metadataValue string) string { - return getModelValue(envHuggingFaceModel, defaultHuggingFaceModel, metadataValue) + return getModelValue(envHuggingFaceModel, DefaultHuggingFaceModel, metadataValue) } func GetOllamaModel(metadataValue string) string { - return getModelValue(envOllamaModel, defaultOllamaModel, metadataValue) + return getModelValue(envOllamaModel, DefaultOllamaModel, metadataValue) } diff --git a/conversation/ollama/ollama.go b/conversation/ollama/ollama.go index fd8d347386..56b262bb3e 100644 --- a/conversation/ollama/ollama.go +++ b/conversation/ollama/ollama.go @@ -48,10 +48,8 @@ func (o *Ollama) Init(ctx context.Context, meta conversation.Metadata) error { return err } - model := conversation.DefaultOllamaModel - if md.Model != "" { - model = md.Model - } + // Resolve model via central helper (uses metadata, then env var, then default) + model := conversation.GetOllamaModel(md.Model) llm, err := ollama.New( ollama.WithModel(model), diff --git a/conversation/openai/openai.go b/conversation/openai/openai.go index aaa26f343e..be414b75bb 100644 --- a/conversation/openai/openai.go +++ b/conversation/openai/openai.go @@ -49,9 +49,12 @@ func (o *OpenAI) Init(ctx context.Context, meta conversation.Metadata) error { return err } - model := conversation.DefaultOpenAIModel - if md.Model != "" { - model = md.Model + // Resolve model via central helper (uses metadata, then env var, then default) + var model string + if md.APIType == "azure" { + model = conversation.GetAzureOpenAIModel(md.Model) + } else { + model = conversation.GetOpenAIModel(md.Model) } // Create options for OpenAI client options := []openai.Option{ diff --git a/tests/config/conversation/anthropic/anthropic.yml b/tests/config/conversation/anthropic/anthropic.yml index 207b1154d8..0d83f42402 100644 --- a/tests/config/conversation/anthropic/anthropic.yml +++ b/tests/config/conversation/anthropic/anthropic.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{ANTHROPIC_API_KEY}}" - name: model - value: "${{DAPR_CONVERSATION_ANTHROPIC_MODEL}}" \ No newline at end of file + value: "${{DAPR_CONVERSATION_ANTHROPIC_MODEL||}}" \ No newline at end of file diff --git a/tests/config/conversation/googleai/googleai.yml b/tests/config/conversation/googleai/googleai.yml index f6f1bd191c..aa40c466f0 100644 --- a/tests/config/conversation/googleai/googleai.yml +++ b/tests/config/conversation/googleai/googleai.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{GOOGLE_AI_API_KEY}}" - name: model - value: "${{DAPR_CONVERSATION_GOOGLEAI_MODEL}}" \ No newline 
at end of file + value: "${{DAPR_CONVERSATION_GOOGLEAI_MODEL||}}" \ No newline at end of file diff --git a/tests/config/conversation/huggingface/huggingface.yml b/tests/config/conversation/huggingface/huggingface.yml index 06e964835f..84293766bf 100644 --- a/tests/config/conversation/huggingface/huggingface.yml +++ b/tests/config/conversation/huggingface/huggingface.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{HUGGINGFACE_API_KEY}}" - name: model - value: "${{DAPR_CONVERSATION_HUGGINGFACE_MODEL}}" \ No newline at end of file + value: "${{DAPR_CONVERSATION_HUGGINGFACE_MODEL||}}" \ No newline at end of file diff --git a/tests/config/conversation/mistral/mistral.yml b/tests/config/conversation/mistral/mistral.yml index fd219b0a26..e7ac7c5d7d 100644 --- a/tests/config/conversation/mistral/mistral.yml +++ b/tests/config/conversation/mistral/mistral.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{MISTRAL_API_KEY}}" - name: model - value: "${{DAPR_CONVERSATION_MISTRAL_MODEL}}" \ No newline at end of file + value: "${{DAPR_CONVERSATION_MISTRAL_MODEL||}}" \ No newline at end of file diff --git a/tests/config/conversation/ollama/ollama.yml b/tests/config/conversation/ollama/ollama.yml index 176ecb2478..4e6db6d028 100644 --- a/tests/config/conversation/ollama/ollama.yml +++ b/tests/config/conversation/ollama/ollama.yml @@ -7,4 +7,4 @@ spec: version: v1 metadata: - name: model - value: "${{DAPR_CONVERSATION_OLLAMA_MODEL}}" \ No newline at end of file + value: "${{DAPR_CONVERSATION_OLLAMA_MODEL||}}" \ No newline at end of file diff --git a/tests/config/conversation/openai/azure/openai.yml b/tests/config/conversation/openai/azure/openai.yml index 106c59dc57..76fdee9315 100644 --- a/tests/config/conversation/openai/azure/openai.yml +++ b/tests/config/conversation/openai/azure/openai.yml @@ -9,7 +9,7 @@ spec: - name: key value: "${{AZURE_OPENAI_API_KEY}}" - name: model - value: "gpt-4o-mini" + value: "${{AZURE_OPENAI_MODEL||}}" - name: endpoint value: "${{AZURE_OPENAI_ENDPOINT}}" - name: apiType diff --git a/tests/config/conversation/openai/openai.yml b/tests/config/conversation/openai/openai.yml deleted file mode 100644 index ee3e3de880..0000000000 --- a/tests/config/conversation/openai/openai.yml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: dapr.io/v1alpha1 -kind: Component -metadata: - name: openai -spec: - type: conversation.openai - version: v1 - metadata: - - name: key - value: "${{OPENAI_API_KEY}}" - - name: model - value: "${{DAPR_CONVERSATION_OPENAI_MODEL}}" \ No newline at end of file diff --git a/tests/config/conversation/openai/openai/openai.yml b/tests/config/conversation/openai/openai/openai.yml index ee3e3de880..21051c337a 100644 --- a/tests/config/conversation/openai/openai/openai.yml +++ b/tests/config/conversation/openai/openai/openai.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{OPENAI_API_KEY}}" - name: model - value: "${{DAPR_CONVERSATION_OPENAI_MODEL}}" \ No newline at end of file + value: "${{DAPR_CONVERSATION_OPENAI_MODEL||}}" \ No newline at end of file diff --git a/tests/conformance/common.go b/tests/conformance/common.go index d13d5574a4..823beeff9b 100644 --- a/tests/conformance/common.go +++ b/tests/conformance/common.go @@ -180,6 +180,21 @@ func parseMetadataProperty(val string) (string, error) { case strings.HasPrefix(val, "${{"): // look up env var with that name. 
remove ${{}} and space k := strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(val, "${{"), "}}")) + + // Check if there's a default value specified with || syntax + if strings.Contains(k, "||") { + parts := strings.SplitN(k, "||", 2) + envVar := strings.TrimSpace(parts[0]) + defaultVal := strings.TrimSpace(parts[1]) + + v := LookUpEnv(envVar) + if v == "" { + return defaultVal, nil + } + return v, nil + } + + // Original behavior - require env var to be set v := LookUpEnv(k) if v == "" { return "", fmt.Errorf("required env var is not set %s", k) From 059b3a4154fc8d0923cf4428dfe4cef84b7aff81 Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Thu, 4 Sep 2025 20:31:17 +0000 Subject: [PATCH 07/13] Fix GPT-5 temperature issue in conversation conformance tests * Conditionally set Temperature=1 for OpenAI models to avoid unsupported temperature=0 * Preserve default behavior for other providers (Grok, Gemini, Anthropic, DeepSeek) * Reference upstream langchaingo fix: tmc/langchaingo#1374 Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- tests/config/conversation/openai/openai.yml | 0 .../conformance/conversation/conversation.go | 29 +++++++++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 tests/config/conversation/openai/openai.yml diff --git a/tests/config/conversation/openai/openai.yml b/tests/config/conversation/openai/openai.yml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/conformance/conversation/conversation.go b/tests/conformance/conversation/conversation.go index 4b9cfda368..a135827234 100644 --- a/tests/conformance/conversation/conversation.go +++ b/tests/conformance/conversation/conversation.go @@ -69,6 +69,8 @@ func ConformanceTests(t *testing.T, props map[string]string, conv conversation.C ctx, cancel := context.WithTimeout(t.Context(), 25*time.Second) defer cancel() + // Note: Temperature is set to 1 for OpenAI models to avoid issues with GPT-5 which does not support temperature=0. + // This can be removed once langchaingo is updated to handle this automatically (tmc/langchaingo#1374). 
req := &conversation.Request{ Message: &[]llms.MessageContent{ { @@ -79,6 +81,9 @@ func ConformanceTests(t *testing.T, props map[string]string, conv conversation.C }, }, } + if component == "openai" { + req.Temperature = 1 + } resp, err := conv.Converse(ctx, req) require.NoError(t, err) @@ -100,6 +105,9 @@ func ConformanceTests(t *testing.T, props map[string]string, conv conversation.C req := &conversation.Request{ Message: &userMsgs, } + if component == "openai" { + req.Temperature = 1 + } resp, err := conv.Converse(ctx, req) require.NoError(t, err) @@ -132,6 +140,9 @@ func ConformanceTests(t *testing.T, props map[string]string, conv conversation.C req := &conversation.Request{ Message: &systemMsgs, } + if component == "openai" { + req.Temperature = 1 + } resp, err := conv.Converse(ctx, req) require.NoError(t, err) @@ -223,6 +234,9 @@ func ConformanceTests(t *testing.T, props map[string]string, conv conversation.C req := &conversation.Request{ Message: &assistantMsgs, } + if component == "openai" { + req.Temperature = 1 + } resp, err := conv.Converse(ctx, req) require.NoError(t, err) @@ -254,6 +268,9 @@ func ConformanceTests(t *testing.T, props map[string]string, conv conversation.C req := &conversation.Request{ Message: &developerMsgs, } + if component == "openai" { + req.Temperature = 1 + } resp, err := conv.Converse(ctx, req) require.NoError(t, err) @@ -303,6 +320,9 @@ func ConformanceTests(t *testing.T, props map[string]string, conv conversation.C Message: &messages, Tools: &tools, } + if component == "openai" { + req.Temperature = 1 + } resp, err := conv.Converse(ctx, req) require.NoError(t, err) @@ -362,6 +382,9 @@ func ConformanceTests(t *testing.T, props map[string]string, conv conversation.C req2 := &conversation.Request{ Message: &responseMessages, } + if component == "openai" { + req2.Temperature = 1 + } resp2, err2 := conv.Converse(ctx, req2) require.NoError(t, err2) @@ -413,6 +436,9 @@ func ConformanceTests(t *testing.T, props map[string]string, conv conversation.C Message: &messages, Tools: &tools, } + if component == "openai" { + req1.Temperature = 1 + } resp1, err := conv.Converse(ctx, req1) require.NoError(t, err) @@ -486,6 +512,9 @@ func ConformanceTests(t *testing.T, props map[string]string, conv conversation.C req2 := &conversation.Request{ Message: &toolResponseMessages, } + if component == "openai" { + req2.Temperature = 1 + } resp2, err := conv.Converse(ctx, req2) require.NoError(t, err) From 571ebe1c0c94d30e2812365bad6ced8029f953e5 Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Thu, 4 Sep 2025 20:46:30 +0000 Subject: [PATCH 08/13] feat: update environment variable names for conversation models and improve documentation Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- conversation/googleai/metadata.yaml | 2 +- conversation/huggingface/metadata.yaml | 2 +- conversation/mistral/metadata.yaml | 2 +- conversation/models.go | 18 +++++++++--------- conversation/ollama/metadata.yaml | 2 +- conversation/openai/metadata.yaml | 7 ++++--- tests/config/conversation/README.md | 17 +++++++++++------ .../conversation/anthropic/anthropic.yml | 2 +- .../config/conversation/googleai/googleai.yml | 2 +- .../conversation/huggingface/huggingface.yml | 2 +- tests/config/conversation/mistral/mistral.yml | 2 +- tests/config/conversation/ollama/ollama.yml | 2 +- tests/config/conversation/openai/openai.yml | 0 .../conversation/openai/openai/openai.yml | 2 +- 14 files changed, 34 insertions(+), 28 deletions(-) delete mode 
100644 tests/config/conversation/openai/openai.yml diff --git a/conversation/googleai/metadata.yaml b/conversation/googleai/metadata.yaml index 41ea72ccc0..6a1417dc4a 100644 --- a/conversation/googleai/metadata.yaml +++ b/conversation/googleai/metadata.yaml @@ -26,7 +26,7 @@ metadata: description: | The GoogleAI LLM to use. type: string - example: '${{DAPR_CONVERSATION_GOOGLEAI_MODEL}}' + example: '${{APR_CONVERSATION_GOOGLEAI_MODEL}}' - name: cacheTTL required: false description: | diff --git a/conversation/huggingface/metadata.yaml b/conversation/huggingface/metadata.yaml index 1f17ea4d51..efe77a317c 100644 --- a/conversation/huggingface/metadata.yaml +++ b/conversation/huggingface/metadata.yaml @@ -26,7 +26,7 @@ metadata: description: | The Huggingface model to use. Uses OpenAI-compatible API. type: string - example: '${{DAPR_CONVERSATION_HUGGINGFACE_MODEL}}' + example: '${{HUGGINGFACE_MODEL}}' - name: endpoint required: false description: | diff --git a/conversation/mistral/metadata.yaml b/conversation/mistral/metadata.yaml index 3e64ba0944..29e8364a20 100644 --- a/conversation/mistral/metadata.yaml +++ b/conversation/mistral/metadata.yaml @@ -26,7 +26,7 @@ metadata: description: | The Mistral LLM to use. type: string - example: '${{DAPR_CONVERSATION_MISTRAL_MODEL}}' + example: '${{APR_CONVERSATION_MISTRAL_MODEL}}' default: 'open-mistral-7b' - name: cacheTTL required: false diff --git a/conversation/models.go b/conversation/models.go index 0f5bfd7c61..95be0dce32 100644 --- a/conversation/models.go +++ b/conversation/models.go @@ -1,5 +1,5 @@ /* -Copyright 2024 The Dapr Authors +Copyright 2025 The Dapr Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at @@ -22,13 +22,13 @@ import ( // These can be overridden via environment variables for runtime configuration const ( // Environment variable names - envOpenAIModel = "DAPR_CONVERSATION_OPENAI_MODEL" + envOpenAIModel = "OPENAI_MODEL" envAzureOpenAIModel = "AZURE_OPENAI_MODEL" - envAnthropicModel = "DAPR_CONVERSATION_ANTHROPIC_MODEL" - envGoogleAIModel = "DAPR_CONVERSATION_GOOGLEAI_MODEL" - envMistralModel = "DAPR_CONVERSATION_MISTRAL_MODEL" - envHuggingFaceModel = "DAPR_CONVERSATION_HUGGINGFACE_MODEL" - envOllamaModel = "DAPR_CONVERSATION_OLLAMA_MODEL" + envAnthropicModel = "ANTHROPIC_MODEL" + envGoogleAIModel = "GOOGLEAI_MODEL" + envMistralModel = "MISTRAL_MODEL" + envHuggingFaceModel = "HUGGINGFACE_MODEL" + envOllamaModel = "OLLAMA_MODEL" ) // Exported default model constants for consumers of the conversation package. @@ -43,8 +43,8 @@ const ( DefaultOllamaModel = "llama3.2:latest" ) -// getEnvOrDefault returns the value of an environment variable or a default value -func getModelValue(envVar, defaultValue, metadataValue string) string { +// getModel returns the value of an environment variable or a default value +func getModel(envVar, defaultValue, metadataValue string) string { if metadataValue != "" { return metadataValue } diff --git a/conversation/ollama/metadata.yaml b/conversation/ollama/metadata.yaml index 59690bb4ca..4195280ba7 100644 --- a/conversation/ollama/metadata.yaml +++ b/conversation/ollama/metadata.yaml @@ -14,7 +14,7 @@ metadata: description: | The Ollama LLM to use. 
type: string - example: '${{DAPR_CONVERSATION_OLLAMA_MODEL}}' + example: '${{OLLAMA_MODEL}}' - name: cacheTTL required: false description: | diff --git a/conversation/openai/metadata.yaml b/conversation/openai/metadata.yaml index 352c39ec8f..0be4b9dcea 100644 --- a/conversation/openai/metadata.yaml +++ b/conversation/openai/metadata.yaml @@ -24,9 +24,10 @@ metadata: - name: model required: false description: | - The OpenAI LLM to use. Defaults to gpt-5-nano (configurable via DAPR_CONVERSATION_OPENAI_MODEL environment variable) + The OpenAI LLM to use. Configurable via OPENAI_MODEL environment variable) type: string - example: '${{DAPR_CONVERSATION_OPENAI_MODEL}}' + default: 'gpt-5-nano' + example: '${{OPENAI_MODEL}}' - name: endpoint required: false description: | @@ -52,4 +53,4 @@ metadata: The type of API to use for the OpenAI service. This is required when using Azure OpenAI. type: string example: 'azure' - default: '' \ No newline at end of file + default: 'gpt-4.1-nano' \ No newline at end of file diff --git a/tests/config/conversation/README.md b/tests/config/conversation/README.md index ac00bec4d7..a1991f23e0 100644 --- a/tests/config/conversation/README.md +++ b/tests/config/conversation/README.md @@ -35,6 +35,7 @@ The tests will automatically skip components for which the required environment ### Using a .env file (Recommended) 1. Copy the template file: + ```bash cp env.template .env ``` @@ -51,12 +52,13 @@ Alternatively, you can set the following environment variables to run the respec You can override the default models used by each component by setting these environment variables: ```bash -export DAPR_CONVERSATION_OPENAI_MODEL="gpt-5-nano" # Default: gpt-5-nano -export DAPR_CONVERSATION_ANTHROPIC_MODEL="claude-3-5-sonnet-20240620" # Default: claude-3-5-sonnet-20240620 -export DAPR_CONVERSATION_GOOGLEAI_MODEL="gemini-1.5-flash" # Default: gemini-1.5-flash -export DAPR_CONVERSATION_MISTRAL_MODEL="open-mistral-7b" # Default: open-mistral-7b -export DAPR_CONVERSATION_HUGGINGFACE_MODEL="deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" # Default: deepseek-ai/DeepSeek-R1-Distill-Qwen-32B -export DAPR_CONVERSATION_OLLAMA_MODEL="llama3.2:latest" # Default: llama3.2:latest +export OPENAI_MODEL="gpt-5-nano" # Default: gpt-5-nano +export AZURE_OPENAI_MODEL="gpt-4.1-nano" # Default: gpt-4.1-nano +export ANTHROPIC_MODEL="claude-3-5-sonnet-20240620" # Default: claude-3-5-sonnet-20240620 +export GOOGLEAI_MODEL="gemini-1.5-flash" # Default: gemini-1.5-flash +export MISTRAL_MODEL="open-mistral-7b" # Default: open-mistral-7b +export HUGGINGFACE_MODEL="deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" # Default: deepseek-ai/DeepSeek-R1-Distill-Qwen-32B +export OLLAMA_MODEL="llama3.2:latest" # Default: llama3.2:latest ``` #### OpenAI ```bash export OPENAI_API_KEY="your_openai_api_key" ``` + Get your API key from: https://platform.openai.com/api-keys ### Azure OpenAI + ```bash export AZURE_OPENAI_API_KEY="your_openai_api_key" export AZURE_OPENAI_ENDPOINT="your_azureopenai_endpoint_here" export AZURE_OPENAI_API_VERSION="your_azureopenai_api_version_here" ``` + Get your configuration values from: https://ai.azure.com/ ```bash diff --git a/tests/config/conversation/anthropic/anthropic.yml b/tests/config/conversation/anthropic/anthropic.yml index 0d83f42402..0fd65f41ab 100644 --- a/tests/config/conversation/anthropic/anthropic.yml +++ b/tests/config/conversation/anthropic/anthropic.yml @@ -9,4 +9,4 @@ spec: - name: key
value: "${{ANTHROPIC_API_KEY}}" - name: model - value: "${{DAPR_CONVERSATION_ANTHROPIC_MODEL||}}" \ No newline at end of file + value: "${{ANTHROPIC_MODEL||}}" \ No newline at end of file diff --git a/tests/config/conversation/googleai/googleai.yml b/tests/config/conversation/googleai/googleai.yml index aa40c466f0..b41df689cb 100644 --- a/tests/config/conversation/googleai/googleai.yml +++ b/tests/config/conversation/googleai/googleai.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{GOOGLE_AI_API_KEY}}" - name: model - value: "${{DAPR_CONVERSATION_GOOGLEAI_MODEL||}}" \ No newline at end of file + value: "${{GOOGLEAI_MODEL||}}" \ No newline at end of file diff --git a/tests/config/conversation/huggingface/huggingface.yml b/tests/config/conversation/huggingface/huggingface.yml index 84293766bf..c2f4080992 100644 --- a/tests/config/conversation/huggingface/huggingface.yml +++ b/tests/config/conversation/huggingface/huggingface.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{HUGGINGFACE_API_KEY}}" - name: model - value: "${{DAPR_CONVERSATION_HUGGINGFACE_MODEL||}}" \ No newline at end of file + value: "${{HUGGINGFACE_MODEL||}}" \ No newline at end of file diff --git a/tests/config/conversation/mistral/mistral.yml b/tests/config/conversation/mistral/mistral.yml index e7ac7c5d7d..2b859ebc72 100644 --- a/tests/config/conversation/mistral/mistral.yml +++ b/tests/config/conversation/mistral/mistral.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{MISTRAL_API_KEY}}" - name: model - value: "${{DAPR_CONVERSATION_MISTRAL_MODEL||}}" \ No newline at end of file + value: "${{MISTRAL_MODEL||}}" \ No newline at end of file diff --git a/tests/config/conversation/ollama/ollama.yml b/tests/config/conversation/ollama/ollama.yml index 4e6db6d028..5e0f999673 100644 --- a/tests/config/conversation/ollama/ollama.yml +++ b/tests/config/conversation/ollama/ollama.yml @@ -7,4 +7,4 @@ spec: version: v1 metadata: - name: model - value: "${{DAPR_CONVERSATION_OLLAMA_MODEL||}}" \ No newline at end of file + value: "${{OLLAMA_MODEL||}}" \ No newline at end of file diff --git a/tests/config/conversation/openai/openai.yml b/tests/config/conversation/openai/openai.yml deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/config/conversation/openai/openai/openai.yml b/tests/config/conversation/openai/openai/openai.yml index 21051c337a..610569214b 100644 --- a/tests/config/conversation/openai/openai/openai.yml +++ b/tests/config/conversation/openai/openai/openai.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{OPENAI_API_KEY}}" - name: model - value: "${{DAPR_CONVERSATION_OPENAI_MODEL||}}" \ No newline at end of file + value: "${{OPENAI_MODEL||}}" \ No newline at end of file From 4b860f48ad8893b3fd6b0d2644a3f37bcf7f39c9 Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Thu, 4 Sep 2025 20:53:30 +0000 Subject: [PATCH 09/13] Standardize conversation component metadata with env vars and defaults - Add environment variable references to all model descriptions - Update defaults to match models.go constants - Fix environment variable names in examples - Remove typo in OpenAI description Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- conversation/anthropic/metadata.yaml | 6 +++--- conversation/googleai/metadata.yaml | 5 +++-- conversation/huggingface/metadata.yaml | 3 ++- conversation/mistral/metadata.yaml | 4 ++-- conversation/ollama/metadata.yaml | 3 ++- conversation/openai/metadata.yaml | 2 +- 6 files changed, 13 insertions(+), 10 deletions(-) 
diff --git a/conversation/anthropic/metadata.yaml b/conversation/anthropic/metadata.yaml index 729971ca47..bb18550b2a 100644 --- a/conversation/anthropic/metadata.yaml +++ b/conversation/anthropic/metadata.yaml @@ -24,10 +24,10 @@ metadata: - name: model required: false description: | - The Anthropic LLM to use. + The Anthropic LLM to use. Configurable via ANTHROPIC_MODEL environment variable. type: string - example: 'claude-3-5-sonnet-20240620' - default: 'claude-3-5-sonnet-20240620' + example: '${{ANTHROPIC_MODEL}}' + default: 'claude-sonnet-4-20250514' - name: cacheTTL required: false description: | diff --git a/conversation/googleai/metadata.yaml b/conversation/googleai/metadata.yaml index 6a1417dc4a..8dee5505a0 100644 --- a/conversation/googleai/metadata.yaml +++ b/conversation/googleai/metadata.yaml @@ -24,9 +24,10 @@ metadata: - name: model required: false description: | - The GoogleAI LLM to use. + The GoogleAI LLM to use. Configurable via GOOGLEAI_MODEL environment variable. type: string - example: '${{APR_CONVERSATION_GOOGLEAI_MODEL}}' + example: '${{GOOGLEAI_MODEL}}' + default: 'gemini-2.5-flash-lite' - name: cacheTTL required: false description: | diff --git a/conversation/huggingface/metadata.yaml b/conversation/huggingface/metadata.yaml index efe77a317c..1685b77969 100644 --- a/conversation/huggingface/metadata.yaml +++ b/conversation/huggingface/metadata.yaml @@ -24,9 +24,10 @@ metadata: - name: model required: false description: | - The Huggingface model to use. Uses OpenAI-compatible API. + The Huggingface model to use. Uses OpenAI-compatible API. Configurable via HUGGINGFACE_MODEL environment variable. type: string example: '${{HUGGINGFACE_MODEL}}' + default: 'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B' - name: endpoint required: false description: | diff --git a/conversation/mistral/metadata.yaml b/conversation/mistral/metadata.yaml index 29e8364a20..9b824d0baf 100644 --- a/conversation/mistral/metadata.yaml +++ b/conversation/mistral/metadata.yaml @@ -24,9 +24,9 @@ metadata: - name: model required: false description: | - The Mistral LLM to use. + The Mistral LLM to use. Configurable via MISTRAL_MODEL environment variable. type: string - example: '${{APR_CONVERSATION_MISTRAL_MODEL}}' + example: '${{MISTRAL_MODEL}}' default: 'open-mistral-7b' - name: cacheTTL required: false diff --git a/conversation/ollama/metadata.yaml b/conversation/ollama/metadata.yaml index 4195280ba7..82ccf3432d 100644 --- a/conversation/ollama/metadata.yaml +++ b/conversation/ollama/metadata.yaml @@ -12,9 +12,10 @@ metadata: - name: model required: false description: | - The Ollama LLM to use. + The Ollama LLM to use. Configurable via OLLAMA_MODEL environment variable. type: string example: '${{OLLAMA_MODEL}}' + default: 'llama3.2:latest' - name: cacheTTL required: false description: | diff --git a/conversation/openai/metadata.yaml b/conversation/openai/metadata.yaml index 0be4b9dcea..1f5efffb02 100644 --- a/conversation/openai/metadata.yaml +++ b/conversation/openai/metadata.yaml @@ -24,7 +24,7 @@ metadata: - name: model required: false description: | - The OpenAI LLM to use. Configurable via OPENAI_MODEL environment variable) + The OpenAI LLM to use. Configurable via OPENAI_MODEL environment variable. 
type: string default: 'gpt-5-nano' example: '${{OPENAI_MODEL}}' From d4a7ae58c2856c71d50d43a1d5745694b705a103 Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Thu, 4 Sep 2025 21:00:42 +0000 Subject: [PATCH 10/13] refactor: replace getModelValue with getModel for consistency in model retrieval functions Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- conversation/models.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/conversation/models.go b/conversation/models.go index 95be0dce32..4561f0f0a6 100644 --- a/conversation/models.go +++ b/conversation/models.go @@ -57,29 +57,29 @@ func getModel(envVar, defaultValue, metadataValue string) string { // Example usage for model getters with metadata support: // Pass metadataValue from your metadata file/struct, or "" if not set. func GetOpenAIModel(metadataValue string) string { - return getModelValue(envOpenAIModel, DefaultOpenAIModel, metadataValue) + return getModel(envOpenAIModel, DefaultOpenAIModel, metadataValue) } func GetAzureOpenAIModel(metadataValue string) string { - return getModelValue(envAzureOpenAIModel, DefaultAzureOpenAIModel, metadataValue) + return getModel(envAzureOpenAIModel, DefaultAzureOpenAIModel, metadataValue) } func GetAnthropicModel(metadataValue string) string { - return getModelValue(envAnthropicModel, DefaultAnthropicModel, metadataValue) + return getModel(envAnthropicModel, DefaultAnthropicModel, metadataValue) } func GetGoogleAIModel(metadataValue string) string { - return getModelValue(envGoogleAIModel, DefaultGoogleAIModel, metadataValue) + return getModel(envGoogleAIModel, DefaultGoogleAIModel, metadataValue) } func GetMistralModel(metadataValue string) string { - return getModelValue(envMistralModel, DefaultMistralModel, metadataValue) + return getModel(envMistralModel, DefaultMistralModel, metadataValue) } func GetHuggingFaceModel(metadataValue string) string { - return getModelValue(envHuggingFaceModel, DefaultHuggingFaceModel, metadataValue) + return getModel(envHuggingFaceModel, DefaultHuggingFaceModel, metadataValue) } func GetOllamaModel(metadataValue string) string { - return getModelValue(envOllamaModel, DefaultOllamaModel, metadataValue) + return getModel(envOllamaModel, DefaultOllamaModel, metadataValue) } From 09e0376b88df9561c664243330f9949faf16db2a Mon Sep 17 00:00:00 2001 From: Erin La <107987318+giterinhub@users.noreply.github.com> Date: Tue, 9 Sep 2025 20:02:00 +0000 Subject: [PATCH 11/13] refactor: update conversation test configs to use centralized model defaults - Set model value to empty string in all provider YAML test configs (openai, azure, anthropic, googleai, mistral, huggingface, ollama) Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com> --- tests/config/conversation/anthropic/anthropic.yml | 2 +- tests/config/conversation/googleai/googleai.yml | 2 +- tests/config/conversation/huggingface/huggingface.yml | 2 +- tests/config/conversation/mistral/mistral.yml | 2 +- tests/config/conversation/ollama/ollama.yml | 2 +- tests/config/conversation/openai/azure/openai.yml | 2 +- tests/config/conversation/openai/openai/openai.yml | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/config/conversation/anthropic/anthropic.yml b/tests/config/conversation/anthropic/anthropic.yml index 0fd65f41ab..bff41c83a4 100644 --- a/tests/config/conversation/anthropic/anthropic.yml +++ b/tests/config/conversation/anthropic/anthropic.yml @@ -9,4 +9,4 @@ spec: - name: 
key value: "${{ANTHROPIC_API_KEY}}" - name: model - value: "${{ANTHROPIC_MODEL||}}" \ No newline at end of file + value: "" # use default for provider or customize via environment variable as defined in conversation/models.go \ No newline at end of file diff --git a/tests/config/conversation/googleai/googleai.yml b/tests/config/conversation/googleai/googleai.yml index b41df689cb..ab9469b881 100644 --- a/tests/config/conversation/googleai/googleai.yml +++ b/tests/config/conversation/googleai/googleai.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{GOOGLE_AI_API_KEY}}" - name: model - value: "${{GOOGLEAI_MODEL||}}" \ No newline at end of file + value: "" # use default for provider or customize via environment variable as defined in conversation/models.go \ No newline at end of file diff --git a/tests/config/conversation/huggingface/huggingface.yml b/tests/config/conversation/huggingface/huggingface.yml index c2f4080992..2a1eee1884 100644 --- a/tests/config/conversation/huggingface/huggingface.yml +++ b/tests/config/conversation/huggingface/huggingface.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{HUGGINGFACE_API_KEY}}" - name: model - value: "${{HUGGINGFACE_MODEL||}}" \ No newline at end of file + value: "" # use default for provider or customize via environment variable as defined in conversation/models.go \ No newline at end of file diff --git a/tests/config/conversation/mistral/mistral.yml b/tests/config/conversation/mistral/mistral.yml index 2b859ebc72..d484f6bae1 100644 --- a/tests/config/conversation/mistral/mistral.yml +++ b/tests/config/conversation/mistral/mistral.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{MISTRAL_API_KEY}}" - name: model - value: "${{MISTRAL_MODEL||}}" \ No newline at end of file + value: "" # use default for provider or customize via environment variable as defined in conversation/models.go \ No newline at end of file diff --git a/tests/config/conversation/ollama/ollama.yml b/tests/config/conversation/ollama/ollama.yml index 5e0f999673..6d4bfc7c41 100644 --- a/tests/config/conversation/ollama/ollama.yml +++ b/tests/config/conversation/ollama/ollama.yml @@ -7,4 +7,4 @@ spec: version: v1 metadata: - name: model - value: "${{OLLAMA_MODEL||}}" \ No newline at end of file + value: "" # use default for provider or customize via environment variable as defined in conversation/models.go \ No newline at end of file diff --git a/tests/config/conversation/openai/azure/openai.yml b/tests/config/conversation/openai/azure/openai.yml index 76fdee9315..9545d3f3a4 100644 --- a/tests/config/conversation/openai/azure/openai.yml +++ b/tests/config/conversation/openai/azure/openai.yml @@ -9,7 +9,7 @@ spec: - name: key value: "${{AZURE_OPENAI_API_KEY}}" - name: model - value: "${{AZURE_OPENAI_MODEL||}}" + value: "" # use default for provider or customize via environment variable as defined in conversation/models.go - name: endpoint value: "${{AZURE_OPENAI_ENDPOINT}}" - name: apiType diff --git a/tests/config/conversation/openai/openai/openai.yml b/tests/config/conversation/openai/openai/openai.yml index 610569214b..ae84a73ff0 100644 --- a/tests/config/conversation/openai/openai/openai.yml +++ b/tests/config/conversation/openai/openai/openai.yml @@ -9,4 +9,4 @@ spec: - name: key value: "${{OPENAI_API_KEY}}" - name: model - value: "${{OPENAI_MODEL||}}" \ No newline at end of file + value: "" # use default for provider or customize via environment variable as defined in conversation/models.go \ No newline at end of file From 3f8b574256cf26857cdcff0d081b68647eaca408 Mon Sep 17 
00:00:00 2001
From: Erin La <107987318+giterinhub@users.noreply.github.com>
Date: Wed, 10 Sep 2025 17:53:56 +0000
Subject: [PATCH 12/13] fix: correct model resolution precedence and update
 metadata examples

- Swap getModel() order: metadata > env var > default
- Remove || syntax from test framework
- Update metadata.yaml examples to use actual defaults

Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com>
---
 conversation/anthropic/metadata.yaml        |  2 +-
 conversation/googleai/metadata.yaml         |  2 +-
 conversation/huggingface/metadata.yaml      |  2 +-
 conversation/mistral/metadata.yaml          |  2 +-
 conversation/ollama/metadata.yaml           |  2 +-
 conversation/openai/metadata.yaml           |  2 +-
 tests/config/conversation/openai/openai.yml |  0
 tests/conformance/common.go                 | 15 ---------------
 8 files changed, 6 insertions(+), 21 deletions(-)
 create mode 100644 tests/config/conversation/openai/openai.yml

diff --git a/conversation/anthropic/metadata.yaml b/conversation/anthropic/metadata.yaml
index bb18550b2a..db46bb8916 100644
--- a/conversation/anthropic/metadata.yaml
+++ b/conversation/anthropic/metadata.yaml
@@ -26,7 +26,7 @@ metadata:
     description: |
       The Anthropic LLM to use. Configurable via ANTHROPIC_MODEL environment variable.
     type: string
-    example: '${{ANTHROPIC_MODEL}}'
+    example: 'claude-sonnet-4-20250514'
     default: 'claude-sonnet-4-20250514'
   - name: cacheTTL
     required: false
diff --git a/conversation/googleai/metadata.yaml b/conversation/googleai/metadata.yaml
index 8dee5505a0..61703b5ac9 100644
--- a/conversation/googleai/metadata.yaml
+++ b/conversation/googleai/metadata.yaml
@@ -26,7 +26,7 @@ metadata:
     description: |
       The GoogleAI LLM to use. Configurable via GOOGLEAI_MODEL environment variable.
     type: string
-    example: '${{GOOGLEAI_MODEL}}'
+    example: 'gemini-2.5-flash-lite'
     default: 'gemini-2.5-flash-lite'
   - name: cacheTTL
     required: false
diff --git a/conversation/huggingface/metadata.yaml b/conversation/huggingface/metadata.yaml
index 1685b77969..e37ccc6c6c 100644
--- a/conversation/huggingface/metadata.yaml
+++ b/conversation/huggingface/metadata.yaml
@@ -26,7 +26,7 @@ metadata:
     description: |
       The Huggingface model to use. Uses OpenAI-compatible API. Configurable via HUGGINGFACE_MODEL environment variable.
     type: string
-    example: '${{HUGGINGFACE_MODEL}}'
+    example: 'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B'
     default: 'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B'
   - name: endpoint
     required: false
diff --git a/conversation/mistral/metadata.yaml b/conversation/mistral/metadata.yaml
index 9b824d0baf..28e2e1b47f 100644
--- a/conversation/mistral/metadata.yaml
+++ b/conversation/mistral/metadata.yaml
@@ -26,7 +26,7 @@ metadata:
     description: |
       The Mistral LLM to use. Configurable via MISTRAL_MODEL environment variable.
     type: string
-    example: '${{MISTRAL_MODEL}}'
+    example: 'open-mistral-7b'
     default: 'open-mistral-7b'
   - name: cacheTTL
     required: false
diff --git a/conversation/ollama/metadata.yaml b/conversation/ollama/metadata.yaml
index 82ccf3432d..3f120aa6cc 100644
--- a/conversation/ollama/metadata.yaml
+++ b/conversation/ollama/metadata.yaml
@@ -14,7 +14,7 @@ metadata:
     description: |
       The Ollama LLM to use. Configurable via OLLAMA_MODEL environment variable.
    type: string
-    example: '${{OLLAMA_MODEL}}'
+    example: 'llama3.2:latest'
     default: 'llama3.2:latest'
   - name: cacheTTL
     required: false
     description: |
diff --git a/conversation/openai/metadata.yaml b/conversation/openai/metadata.yaml
index 1f5efffb02..826a534888 100644
--- a/conversation/openai/metadata.yaml
+++ b/conversation/openai/metadata.yaml
@@ -27,7 +27,7 @@ metadata:
       The OpenAI LLM to use. Configurable via OPENAI_MODEL environment variable.
     type: string
     default: 'gpt-5-nano'
-    example: '${{OPENAI_MODEL}}'
+    example: 'gpt-5-nano'
   - name: endpoint
     required: false
     description: |
diff --git a/tests/config/conversation/openai/openai.yml b/tests/config/conversation/openai/openai.yml
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/conformance/common.go b/tests/conformance/common.go
index 823beeff9b..d13d5574a4 100644
--- a/tests/conformance/common.go
+++ b/tests/conformance/common.go
@@ -180,21 +180,6 @@ func parseMetadataProperty(val string) (string, error) {
 	case strings.HasPrefix(val, "${{"):
 		// look up env var with that name. remove ${{}} and space
 		k := strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(val, "${{"), "}}"))
-
-		// Check if there's a default value specified with || syntax
-		if strings.Contains(k, "||") {
-			parts := strings.SplitN(k, "||", 2)
-			envVar := strings.TrimSpace(parts[0])
-			defaultVal := strings.TrimSpace(parts[1])
-
-			v := LookUpEnv(envVar)
-			if v == "" {
-				return defaultVal, nil
-			}
-			return v, nil
-		}
-
-		// Original behavior - require env var to be set
 		v := LookUpEnv(k)
 		if v == "" {
 			return "", fmt.Errorf("required env var is not set %s", k)

From 8c81a6e24c1b776291d227b5efe05c0272fb6226 Mon Sep 17 00:00:00 2001
From: Erin La <107987318+giterinhub@users.noreply.github.com>
Date: Thu, 11 Sep 2025 14:32:09 +0000
Subject: [PATCH 13/13] fix: reorder logic in getModel function to prioritize
 the environment variable value

Signed-off-by: Erin La <107987318+giterinhub@users.noreply.github.com>
---
 conversation/models.go | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/conversation/models.go b/conversation/models.go
index 4561f0f0a6..3042c0fbd5 100644
--- a/conversation/models.go
+++ b/conversation/models.go
@@ -45,12 +45,12 @@ const (
 
 // getModel resolves the model to use from the metadata value, the environment variable, and the built-in default
 func getModel(envVar, defaultValue, metadataValue string) string {
-	if metadataValue != "" {
-		return metadataValue
-	}
 	if value := os.Getenv(envVar); value != "" {
 		return value
 	}
+	if metadataValue != "" {
+		return metadataValue
+	}
 	return defaultValue
 }
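
For reviewers, a minimal sketch of how the resolution order behaves once PATCH 13 is applied: the provider's environment variable wins, then the component metadata value, then the package default. This is an editor's illustration, not part of the series; the import path github.com/dapr/components-contrib/conversation and the example model names are assumptions, while GetOpenAIModel, DefaultOpenAIModel ("gpt-5-nano"), and the OPENAI_MODEL variable are defined in conversation/models.go above.

```go
package main

import (
	"fmt"
	"os"

	// Assumed import path for the conversation package shown in the diffs.
	"github.com/dapr/components-contrib/conversation"
)

func main() {
	// Nothing set anywhere: the package default wins.
	os.Unsetenv("OPENAI_MODEL")
	fmt.Println(conversation.GetOpenAIModel("")) // "gpt-5-nano"

	// Only component metadata set: the metadata value is used.
	fmt.Println(conversation.GetOpenAIModel("gpt-4.1-nano")) // "gpt-4.1-nano"

	// Environment variable set: it overrides both metadata and the default.
	os.Setenv("OPENAI_MODEL", "gpt-4o-mini")
	fmt.Println(conversation.GetOpenAIModel("gpt-4.1-nano")) // "gpt-4o-mini"
}
```

This also matches the test configs in PATCH 11, which leave model empty so that either the environment variable or the provider default applies.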