
Commit 9ddad2e

refactor: Remove all OpenAI dependencies - 100% local AI
- Remove OPENAI_API_KEY from docker-compose.yml and CI workflow
- Delete USE_MOCK_OPENAI flag from Config class
- Update is_ai_enabled() to check for local model services
- Replace OpenAI references in env_manager with local model checks
- Update gdrive_real.py to use LM Studio for embeddings instead of OpenAI
- Delete obsolete test_openai_client.py
- Update CI to use local model URLs instead of API keys

The project now runs entirely on local models (LM Studio, CLIP, LLaVA) with no cloud dependencies or API keys required.
1 parent 4cdb72d commit 9ddad2e


6 files changed: +33 additions, -91 deletions


.github/workflows/ci.yml

Lines changed: 2 additions & 2 deletions
@@ -34,8 +34,8 @@ jobs:
       - name: Run tests
         env:
           USE_MOCK_DB: "true"
-          OPENAI_API_KEY: "test-key"
-          ANTHROPIC_API_KEY: "test-key"
+          LM_STUDIO_URL: "http://localhost:1234"
+          CLIP_SERVICE_URL: "http://localhost:8002"
         run: |
           python -m pytest tests/unit/test_basic_functionality.py tests/unit/test_basic_modules.py tests/unit/test_factory.py -v --tb=short
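
The tests themselves never call these services; the URLs only need to be present so the configuration checks introduced later in this commit pass without any API key. A minimal sketch of how application code can pick the CI-provided URLs up at runtime (variable names come from the diff; everything else here is illustrative, not taken from the repo):

    import os

    # Local model endpoints; in CI these come from the env block above.
    LM_STUDIO_URL = os.getenv("LM_STUDIO_URL", "")
    CLIP_SERVICE_URL = os.getenv("CLIP_SERVICE_URL", "")

    # No API key is needed: AI availability is derived purely from endpoint presence.
    AI_ENABLED = bool(LM_STUDIO_URL or CLIP_SERVICE_URL)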

app/config.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,6 @@ class Config:
6161
# Feature Flags
6262
# NO MOCKS - PostgreSQL ONLY
6363
USE_MOCK_DATABASE: bool = False
64-
USE_MOCK_OPENAI: bool = False
6564
FEATURE_SESSIONS_ENABLED: bool = env.get_bool("FEATURE_SESSIONS_ENABLED", True)
6665
FEATURE_ATTACHMENTS_ENABLED: bool = env.get_bool("FEATURE_ATTACHMENTS_ENABLED", True)
6766
ENABLE_TELEMETRY: bool = env.get_bool("ENABLE_TELEMETRY", False)
@@ -115,8 +114,9 @@ def get_summary(cls) -> dict:
115114

116115
@classmethod
117116
def is_ai_enabled(cls) -> bool:
118-
"""Check if AI features are enabled."""
119-
return bool(cls.OPENAI_API_KEY) or cls.USE_MOCK_OPENAI
117+
"""Check if AI features are enabled (local models)."""
118+
# AI is always enabled with local models - no API keys needed!
119+
return bool(cls.LM_STUDIO_URL or cls.CLIP_SERVICE_URL or cls.LLAVA_SERVICE_URL)
120120

121121
@classmethod
122122
def is_database_configured(cls) -> bool:
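
The new check relies on LM_STUDIO_URL, CLIP_SERVICE_URL, and LLAVA_SERVICE_URL being declared elsewhere on Config. A minimal sketch of what those declarations might look like, assuming they are read from the environment like the other settings; only the attribute names appear in the diff, the defaults and exact form are assumptions:

    import os

    class Config:
        # Assumed declarations for the local-model endpoints used by is_ai_enabled();
        # an empty string means "not configured".
        LM_STUDIO_URL: str = os.getenv("LM_STUDIO_URL", "")
        CLIP_SERVICE_URL: str = os.getenv("CLIP_SERVICE_URL", "")
        LLAVA_SERVICE_URL: str = os.getenv("LLAVA_SERVICE_URL", "")

        @classmethod
        def is_ai_enabled(cls) -> bool:
            """AI features are on whenever any local model endpoint is configured."""
            return bool(cls.LM_STUDIO_URL or cls.CLIP_SERVICE_URL or cls.LLAVA_SERVICE_URL)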

app/core/env_manager.py

Lines changed: 8 additions & 9 deletions
@@ -37,8 +37,6 @@ class EnvManager:

     # Sensitive keys that should be masked in logs
     SENSITIVE_KEYS = {
-        "OPENAI_API_KEY",
-        "ANTHROPIC_API_KEY",
         "JWT_SECRET_KEY",
         "DATABASE_URL",
         "POSTGRES_PASSWORD",

@@ -47,6 +45,8 @@ class EnvManager:
         "SENTRY_DSN",
         "AWS_SECRET_KEY",
         "AWS_ACCESS_KEY_ID",
+        "GOOGLE_CLIENT_SECRET",
+        "CIPHER_API_KEY",
     }

     def __init__(self, env_file: Path | None = None) -> None:

@@ -123,9 +123,9 @@ def _validate_required(self) -> None:
         if self.get("JWT_SECRET_KEY", "").endswith("change-in-production"):
             raise ValueError("JWT_SECRET_KEY must be changed for production")

-        # Check OpenAI key if not using mocks
-        if not self.get_bool("USE_MOCK_OPENAI") and not self.get("OPENAI_API_KEY"):
-            logger.warning("OPENAI_API_KEY not set, AI features will be limited")
+        # Check local model services
+        if not self.get("LM_STUDIO_URL") and not self.get("CLIP_SERVICE_URL"):
+            logger.warning("Local model services not configured, AI features may be limited")

         # Validate required variables
         missing = [var for var in required if not os.getenv(var)]

@@ -307,15 +307,14 @@ def get_config_summary(self) -> dict[str, Any]:
         return {
             "environment": self.environment.value,
             "database_configured": bool(self.get("DATABASE_URL") or self.get("POSTGRES_DB")),
-            "openai_configured": bool(self.get("OPENAI_API_KEY")),
+            "local_models_configured": bool(self.get("LM_STUDIO_URL") or self.get("CLIP_SERVICE_URL")),
             "redis_configured": bool(self.get("REDIS_URL")),
             "debug_mode": self.get_bool("DEBUG"),
             "host": self.get("HOST", "127.0.0.1"),
             "port": self.get_int("PORT", 8000),
             "log_level": self.get("LOG_LEVEL", "INFO"),
             "features": {
                 "mock_database": self.get_bool("USE_MOCK_DATABASE"),
-                "mock_openai": self.get_bool("USE_MOCK_OPENAI"),
                 "sessions": self.get_bool("FEATURE_SESSIONS_ENABLED", True),
                 "attachments": self.get_bool("FEATURE_ATTACHMENTS_ENABLED", True),
             },

@@ -344,8 +343,8 @@ def validate_production_ready(self) -> list[str]:
         if self.get("HOST") == "0.0.0.0":
             issues.append("HOST is set to 0.0.0.0 (security risk)")

-        if not self.get("OPENAI_API_KEY") and not self.get_bool("USE_MOCK_OPENAI"):
-            issues.append("No OpenAI API key configured")
+        if not self.get("LM_STUDIO_URL") and not self.get("CLIP_SERVICE_URL"):
+            issues.append("No local model services configured")

         if self.get_bool("USE_MOCK_DATABASE"):
             issues.append("Mock database is enabled")
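
Taken together, the summary and readiness checks can be used as below. This is a usage sketch based only on the methods visible in the diff; the import path follows the file path shown above, and the printed messages are illustrative:

    from app.core.env_manager import EnvManager

    manager = EnvManager()  # optionally EnvManager(env_file=Path(".env"))

    summary = manager.get_config_summary()
    print(summary["local_models_configured"])  # True when LM_STUDIO_URL or CLIP_SERVICE_URL is set

    # Each entry is a human-readable issue, e.g. "No local model services configured"
    for issue in manager.validate_production_ready():
        print(f"not production ready: {issue}")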

app/routes/gdrive_real.py

Lines changed: 20 additions & 20 deletions
@@ -13,13 +13,9 @@
 from app.services.memory_service_postgres import MemoryServicePostgres
 from app.utils.logging_config import get_logger

-# Import OpenAI for embeddings
-try:
-    import openai
-
-    OPENAI_AVAILABLE = True
-except ImportError:
-    OPENAI_AVAILABLE = False
+# Local model configuration
+LM_STUDIO_AVAILABLE = bool(os.getenv("LM_STUDIO_URL"))
+CLIP_SERVICE_AVAILABLE = bool(os.getenv("CLIP_SERVICE_URL"))

 logger = get_logger(__name__)
 router = APIRouter()

@@ -131,22 +127,26 @@ async def sync_file(
     if not file_info:
         raise HTTPException(status_code=404, detail="File not found")

-    # Generate embeddings if OpenAI is available
+    # Generate embeddings using local models
     embeddings = None
-    if (
-        OPENAI_AVAILABLE
-        and os.getenv("OPENAI_API_KEY")
-        and os.getenv("OPENAI_API_KEY") != "sk-mock-key-for-testing-only"
-    ):
+    if LM_STUDIO_AVAILABLE:
         try:
-            openai.api_key = os.getenv("OPENAI_API_KEY")
-            response = await openai.Embedding.acreate(
-                model="text-embedding-ada-002",
-                input=content[:8000],  # Limit to 8000 chars for embedding
-            )
-            embeddings = response["data"][0]["embedding"]
+            # Use LM Studio for text embeddings (Nomic)
+            import aiohttp
+            async with aiohttp.ClientSession() as session:
+                payload = {
+                    "model": "text-embedding-nomic-embed-text-v1.5",
+                    "input": content[:8000],  # Limit to 8000 chars for embedding
+                }
+                async with session.post(
+                    f"{os.getenv('LM_STUDIO_URL')}/embeddings",
+                    json=payload,
+                ) as response:
+                    if response.status == 200:
+                        result = await response.json()
+                        embeddings = result["data"][0]["embedding"]
         except Exception as e:
-            logger.warning(f"Could not generate embeddings: {e}")
+            logger.warning(f"Could not generate embeddings with LM Studio: {e}")

     # Create memory in PostgreSQL
     memory_data = {
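
The same call, pulled out into a standalone helper for clarity. This sketch mirrors the diff above (endpoint path, Nomic model name, and the 8000-character truncation are copied from it); the function name and the early-return error handling are illustrative, not part of the commit:

    import os
    import aiohttp

    async def embed_text_with_lm_studio(content: str) -> list[float] | None:
        """Return a Nomic embedding from LM Studio, or None if unavailable."""
        lm_studio_url = os.getenv("LM_STUDIO_URL")
        if not lm_studio_url:
            return None
        payload = {
            "model": "text-embedding-nomic-embed-text-v1.5",
            "input": content[:8000],  # same 8000-char limit as in sync_file above
        }
        try:
            async with aiohttp.ClientSession() as session:
                async with session.post(f"{lm_studio_url}/embeddings", json=payload) as response:
                    if response.status != 200:
                        return None
                    result = await response.json()
                    return result["data"][0]["embedding"]
        except aiohttp.ClientError:
            return None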

docker-compose.yml

Lines changed: 0 additions & 1 deletion
@@ -15,7 +15,6 @@ services:
     environment:
       - DATABASE_URL=postgresql://secondbrain:changeme@postgres:5432/secondbrain
       - ENVIRONMENT=development
-      - OPENAI_API_KEY=${OPENAI_API_KEY}
     volumes:
       - .:/app:rw
       - /app/.venv  # Exclude .venv from mount

tests/unit/test_openai_client.py

Lines changed: 0 additions & 56 deletions
This file was deleted.
