Skip to content

Commit 1c8cbdd

Browse files
committed
Update Ollama evolve
1 parent 25f6e5d commit 1c8cbdd

File tree

7 files changed

+36
-19
lines changed

7 files changed

+36
-19
lines changed

README.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,10 @@ Built by [**Superagentic AI**](https://super-agentic.ai) - *Advancing the future
4545
- **🔧 Multi-Provider LLM Support** - OpenAI, Anthropic, Google, and Ollama (local models included!)
4646
- **⚡ CI/CD Integration** - Automated quality gates and GitHub Actions support
4747

48+
### 📋 Open Source Limitations
49+
50+
The open source version provides core evaluation capabilities. Advanced features like agent evolution and optimization have limited support. For full optimization capabilities tailored to your needs, please get in touch.
51+
4852
---
4953

5054
## Quick Start

docs/index.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,10 @@ When AI coding agents dazzle with impressive code but leave you wondering about
6565

6666
**Cloud providers** (OpenAI, Anthropic, Google) still require API keys. See [Installation](getting-started/installation/#setting-up-llm-providers) for setup.
6767

68+
### 📋 Open Source Limitations
69+
70+
The open source version provides core evaluation capabilities. Advanced features like agent evolution and optimization have limited support. For full optimization capabilities tailored to your needs, please get in touch.
71+
6872
---
6973

7074
## 🚀 Quick Start (30 Seconds)

mkdocs.yml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -104,10 +104,10 @@ nav:
104104
- Performance Testing: guides/performance-testing.md
105105
- UI Testing: guides/ui-testing.md
106106
- Advanced:
107-
- GEPA Integration: advanced/gepa.md
108-
- Bloom Integration: advanced/bloom.md
109-
- Error Handling: advanced/error-handling.md
110-
- Performance: advanced/performance.md
107+
- GEPA Integration: advanced/gepa.md
108+
- Bloom Integration: advanced/bloom.md
109+
- Error Handling: advanced/error-handling.md
110+
- Performance: advanced/performance.md
111111
- API Reference: api-reference.md
112112
- Examples:
113113
- Behavioral Spec Example: examples/behavioral-spec.md

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "codeoptix"
3-
version = "0.1.0"
3+
version = "0.1.1"
44
description = "Agentic Code Optimization & Deep Evaluation for Superior Coding Agent Experience. Built by Superagentic AI."
55
readme = "README.md"
66
requires-python = ">=3.12"

src/codeoptix/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,4 +5,4 @@
55
Built by Superagentic AI - Advancing AI agent optimization and autonomous systems.
66
"""
77

8-
__version__ = "0.1.0"
8+
__version__ = "0.1.1"

src/codeoptix/cli.py

Lines changed: 18 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -371,7 +371,13 @@ def reflect(input, output, agent_name):
371371
)
372372
@click.option("--iterations", default=3, help="Number of evolution iterations")
373373
@click.option("--config", type=click.Path(exists=True), help="Path to config file (JSON/YAML)")
374-
def evolve(input, reflection, output, iterations, config):
374+
@click.option(
375+
"--llm-provider",
376+
default="openai",
377+
help="LLM provider for evolution (anthropic, openai, google, ollama)",
378+
)
379+
@click.option("--llm-api-key", help="API key for LLM (or set environment variable)")
380+
def evolve(input, reflection, output, iterations, config, llm_provider, llm_api_key):
375381
"""Evolve agent prompts based on evaluation results."""
376382
click.echo("🧬 Evolving agent prompts...")
377383

@@ -428,11 +434,18 @@ def evolve(input, reflection, output, iterations, config):
428434
metadata = results.get("metadata", {})
429435
agent_type = metadata.get("agent", "claude-code")
430436

431-
# Get LLM provider from results or config
432-
llm_provider = evolve_config.get("llm_provider", "openai")
433-
llm_api_key = evolve_config.get("llm_api_key") or os.getenv(f"{llm_provider.upper()}_API_KEY")
437+
# Get LLM provider from command line or config
438+
if llm_provider == "openai": # default, check config
439+
llm_provider = evolve_config.get("llm_provider", "openai")
440+
# llm_api_key param takes precedence, then config, then env
441+
llm_api_key = (
442+
llm_api_key
443+
or evolve_config.get("llm_api_key")
444+
or os.getenv(f"{llm_provider.upper()}_API_KEY")
445+
)
434446

435-
if not llm_api_key:
447+
is_ollama = llm_provider == "ollama"
448+
if not llm_api_key and not is_ollama:
436449
click.echo(
437450
f"❌ LLM API key required. Set {llm_provider.upper()}_API_KEY or use --config", err=True
438451
)

src/codeoptix/utils/llm.py

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -250,7 +250,7 @@ def chat_completion(
250250
"""Generate a chat completion using a local Ollama model."""
251251
payload = {
252252
"model": model,
253-
"messages": messages,
253+
"prompt": messages[0]["content"], # Use first message content as prompt
254254
"stream": False,
255255
"options": {
256256
"temperature": temperature,
@@ -261,7 +261,7 @@ def chat_completion(
261261

262262
data = json.dumps(payload).encode("utf-8")
263263
req = urllib.request.Request(
264-
f"{self.base_url}/api/chat",
264+
f"{self.base_url}/api/generate",
265265
data=data,
266266
headers={"Content-Type": "application/json"},
267267
method="POST",
@@ -290,12 +290,8 @@ def chat_completion(
290290
except json.JSONDecodeError as exc: # pragma: no cover - unexpected response
291291
raise RuntimeError(f"Invalid JSON from Ollama: {body!r}") from exc
292292

293-
# Ollama chat API: response["message"]["content"]
294-
message = obj.get("message") or {}
295-
content = message.get("content")
296-
if isinstance(content, str):
297-
return content
298-
return str(content) if content is not None else ""
293+
# Ollama generate API: response["response"]
294+
return obj.get("response", "")
299295

300296
def get_available_models(self) -> list[str]:
301297
"""Get available Ollama models via /api/tags."""

0 commit comments

Comments (0)