Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions frontend/src/assets/png/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,5 +39,6 @@ export { default as OpenAiPng } from "./model-providers/openai.png";
export { default as OpenAiCompatiblePng } from "./model-providers/openai-compatible.png";
export { default as OpenRouterPng } from "./model-providers/openrouter.png";
export { default as SiliconFlowPng } from "./model-providers/siliconflow.png";
// NOTE(review): the asset is a .png but the export is named "OllamaSvg" —
// consider renaming to OllamaPng for consistency with the other exports
// (importers elsewhere must be updated in the same change).
export { default as OllamaSvg } from "./model-providers/ollama.png";

export { default as TrendPng } from "./trend.png";
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 2 additions & 0 deletions frontend/src/constants/icons.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import {
OpenRouterPng,
SiliconFlowPng,
ValueCellAgentPng,
OllamaSvg,
} from "@/assets/png";

export const MODEL_PROVIDER_ICONS = {
Expand All @@ -26,6 +27,7 @@ export const MODEL_PROVIDER_ICONS = {
google: GooglePng,
azure: AzurePng,
dashscope: DashScopePng,
ollama: OllamaSvg
};

export const EXCHANGE_ICONS = {
Expand Down
3 changes: 3 additions & 0 deletions python/configs/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,9 @@ models:
dashscope:
config_file: "providers/dashscope.yaml"
api_key_env: "DASHSCOPE_API_KEY"

ollama:
config_file: "providers/ollama.yaml"

# Agent Configuration
agents:
Expand Down
7 changes: 7 additions & 0 deletions python/configs/providers/ollama.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Ollama provider configuration — local model serving via the Ollama daemon.
# No api_key_env is needed: Ollama requires no API key.
name: Ollama
provider_type: Ollama
enabled: true
# Model used when a caller does not request a specific model id.
default_model: qwen3:4b
models:
  - id: qwen3:4b
    name: qwen3:4b
2 changes: 1 addition & 1 deletion python/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ dependencies = [
"yfinance>=0.2.65",
"requests>=2.32.5",
"akshare>=1.17.87",
"agno[openai, google, lancedb]>=2.0,<3.0",
"agno[openai, google, lancedb, ollama]>=2.0,<3.0",
"edgartools>=4.12.2",
"sqlalchemy>=2.0.43",
"aiosqlite>=0.19.0",
Expand Down
22 changes: 19 additions & 3 deletions python/uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions python/valuecell/adapters/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
OpenAICompatibleProvider,
OpenAIProvider,
OpenRouterProvider,
OllamaProvider,
SiliconFlowProvider,
create_model,
create_model_for_agent,
Expand All @@ -49,6 +50,7 @@
"SiliconFlowProvider",
"DeepSeekProvider",
"DashScopeProvider",
"OllamaProvider",
# Convenience functions
"create_model",
"create_model_for_agent",
Expand Down
24 changes: 24 additions & 0 deletions python/valuecell/adapters/models/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -564,6 +564,29 @@ def create_embedder(self, model_id: Optional[str] = None, **kwargs):
)


class OllamaProvider(ModelProvider):
    """Ollama model provider.

    Ollama serves models locally over HTTP (the client defaults to
    http://localhost:11434), so no API key is required.
    """

    def create_model(self, model_id: Optional[str] = None, **kwargs):
        """Create an Ollama model via agno.

        Args:
            model_id: Model identifier (e.g. "qwen3:4b"). Falls back to the
                provider's configured ``default_model`` when omitted.
            **kwargs: Extra keyword arguments forwarded to agno's ``Ollama``
                model (previously these were silently dropped).

        Returns:
            An agno ``Ollama`` model instance.

        Raises:
            ImportError: If the agno package is not installed.
        """
        try:
            from agno.models.ollama import Ollama
        except ImportError:
            raise ImportError(
                "agno package not installed, install with: pip install agno"
            )

        model_id = model_id or self.config.default_model

        # Honor an explicitly configured host; otherwise let the underlying
        # ollama client use its default (http://localhost:11434).
        host = self.config.parameters.get("host")
        if host:
            kwargs.setdefault("host", host)

        logger.info(f"Creating Ollama model: {model_id}")

        return Ollama(id=model_id, **kwargs)

    def is_available(self) -> bool:
        """Return True: Ollama needs no API key and a host is optional.

        Previously this returned False unless a "host" parameter was set,
        which made the provider always unavailable with the shipped
        providers/ollama.yaml (which configures no host).
        """
        return True


class ModelFactory:
"""
Factory for creating model instances with provider abstraction
Expand All @@ -585,6 +608,7 @@ class ModelFactory:
"openai-compatible": OpenAICompatibleProvider,
"deepseek": DeepSeekProvider,
"dashscope": DashScopeProvider,
"ollama": OllamaProvider,
}

def __init__(self, config_manager: Optional[ConfigManager] = None):
Expand Down
1 change: 1 addition & 0 deletions python/valuecell/config/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,7 @@ def primary_provider(self) -> str:
"openai",
"openai-compatible",
"azure",
"ollama",
]

for preferred in preferred_order:
Expand Down
1 change: 1 addition & 0 deletions python/valuecell/core/super_agent/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,7 @@ async def run(
add_history_to_context=True,
stream=True,
):

if response.content_type == "str":
yield response.content
continue
Expand Down
1 change: 1 addition & 0 deletions python/valuecell/server/api/routers/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,7 @@ def _api_key_url_for(provider: str) -> str | None:
"siliconflow": "https://cloud.siliconflow.cn/account/ak",
"deepseek": "https://platform.deepseek.com/api_keys",
"dashscope": "https://bailian.console.aliyun.com/#/home",
"ollama": None,
}
return mapping.get(provider)

Expand Down