diff --git a/.env.example b/.env.example index 37301da5..76694cf8 100644 --- a/.env.example +++ b/.env.example @@ -5,7 +5,7 @@ # -------------------------------------------- # Backend Server Settings # -------------------------------------------- -OM_PORT=8080 +OM_PORT=18080 # API Authentication (IMPORTANT: Set a strong API key for production!) # Generate a secure key: openssl rand -base64 32 diff --git a/.env.project.example b/.env.project.example new file mode 100644 index 00000000..79f3b6b0 --- /dev/null +++ b/.env.project.example @@ -0,0 +1,19 @@ +# OpenMemory (project-scoped instance) +# +# Copy to `.env.project.` and customize. +# Start: +# docker compose --project-name om_ --env-file .env.project. up -d --build openmemory + +OM_PORT=18080 +# Generate: openssl rand -base64 32 +OM_API_KEY=change-me + +OM_MODE=standard +OM_TIER=hybrid +OM_EMBEDDINGS=synthetic +OM_EMBEDDING_FALLBACK=synthetic + +OM_METADATA_BACKEND=sqlite +OM_VECTOR_BACKEND=sqlite +OM_DB_PATH=/data/openmemory.sqlite + diff --git a/.env.team.example b/.env.team.example new file mode 100644 index 00000000..eb7c6bda --- /dev/null +++ b/.env.team.example @@ -0,0 +1,18 @@ +# OpenMemory (team-shared instance) +# +# This is an explicit shared memory instance (opt-in). +# Copy to `.env.team` and customize. + +OM_PORT=18081 +# Generate: openssl rand -base64 32 +OM_API_KEY=change-me + +OM_MODE=standard +OM_TIER=hybrid +OM_EMBEDDINGS=synthetic +OM_EMBEDDING_FALLBACK=synthetic + +OM_METADATA_BACKEND=sqlite +OM_VECTOR_BACKEND=sqlite +OM_DB_PATH=/data/openmemory.sqlite + diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index 54f76715..d42ecb97 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -47,7 +47,7 @@ jobs: run: | max_attempts=30 attempt=0 - until curl -f http://localhost:8080/health || [ $attempt -eq $max_attempts ]; do + until curl -f http://localhost:18080/health || [ $attempt -eq $max_attempts ]; do echo "Waiting for service to be healthy... (attempt $((attempt+1))/$max_attempts)" sleep 2 attempt=$((attempt+1)) @@ -60,7 +60,7 @@ jobs: fi echo "✅ Service is healthy!" - curl -v http://localhost:8080/health + curl -v http://localhost:18080/health - name: Show Container Logs if: failure() diff --git a/.github/workflows/publish-sdks.yml b/.github/workflows/publish-sdks.yml index bdc0c792..33a41c04 100644 --- a/.github/workflows/publish-sdks.yml +++ b/.github/workflows/publish-sdks.yml @@ -31,6 +31,8 @@ jobs: needs: detect-changes runs-on: ubuntu-latest if: needs.detect-changes.outputs.js_changed == 'true' || github.event_name == 'workflow_dispatch' + env: + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} steps: - uses: actions/checkout@v4 @@ -54,10 +56,15 @@ jobs: run: cd packages/openmemory-js && npm run build - name: publish + if: env.NPM_TOKEN != '' run: cd packages/openmemory-js && npm publish --access public env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + - name: publish skipped + if: env.NPM_TOKEN == '' + run: echo "ℹ️ NPM_TOKEN not set; skipping npm publish." 
+ - name: notify run: | echo "✅ Published openmemory-js@${{ steps.version.outputs.version }}" @@ -66,6 +73,8 @@ jobs: needs: detect-changes runs-on: ubuntu-latest if: needs.detect-changes.outputs.py_changed == 'true' || github.event_name == 'workflow_dispatch' + env: + PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} steps: - uses: actions/checkout@v4 @@ -88,11 +97,16 @@ jobs: run: cd packages/openmemory-py && python -m build - name: publish + if: env.PYPI_TOKEN != '' run: cd packages/openmemory-py && python -m twine upload dist/* --skip-existing env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + - name: publish skipped + if: env.PYPI_TOKEN == '' + run: echo "ℹ️ PYPI_TOKEN not set; skipping twine upload." + - name: notify run: | echo "✅ Published openmemory-py@${{ steps.version.outputs.version }}" diff --git a/.gitignore b/.gitignore index 883f58c9..5440c6d9 100644 --- a/.gitignore +++ b/.gitignore @@ -23,4 +23,6 @@ __pycache__ test *.db hits.json -debug_*.log \ No newline at end of file +debug_*.log +test-results/ +.DS_Store diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index 6f073b34..f66f6f7b 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -20,7 +20,7 @@ OpenMemory is a self-hosted AI memory engine implementing **Hierarchical Memory ┌───────────▼───────────┐ │ REST API SERVER │ │ (TypeScript/Node) │ - │ Port: 8080 │ + │ Port: 18080 │ └───────────┬───────────┘ │ ┌────────────────────────────┼────────────────────────────┐ @@ -529,7 +529,7 @@ NODE_SECTOR_MAP = { ```bash # Server -OM_PORT=8080 +OM_PORT=18080 OM_DB_PATH=./data/openmemory.sqlite OM_API_KEY= # Optional bearer token @@ -683,7 +683,7 @@ docker compose up -d Ports: -- `8080` → API server +- `18080` → API server - Data persisted in `/data/openmemory.sqlite` ### Manual diff --git a/Makefile b/Makefile index 1336e9ec..168255ec 100644 --- a/Makefile +++ b/Makefile @@ -11,48 +11,43 @@ help: ## Show this help message # Installation and Setup install: ## Install all dependencies - @echo "📦 Installing backend dependencies..." - cd backend && npm install - @echo "📦 Installing JavaScript SDK dependencies..." - cd sdk-js && npm install + @echo "📦 Installing JavaScript (server + SDK) dependencies..." + cd packages/openmemory-js && npm install @echo "📦 Installing Python SDK dependencies..." - cd sdk-py && pip install -e . + cd packages/openmemory-py && pip install -e . @echo "✅ All dependencies installed!" install-dev: ## Install development dependencies @echo "🛠️ Installing development dependencies..." - cd backend && npm install - cd sdk-js && npm install - cd sdk-py && pip install -e .[dev] + cd packages/openmemory-js && npm install + cd packages/openmemory-py && pip install -e .[dev] @echo "✅ Development dependencies installed!" # Build build: ## Build all components - @echo "🏗️ Building backend..." - cd backend && npm run build - @echo "🏗️ Building JavaScript SDK..." - cd sdk-js && npm run build + @echo "🏗️ Building JavaScript (server + SDK)..." + cd packages/openmemory-js && npm run build @echo "✅ All components built!" build-backend: ## Build backend only - cd backend && npm run build + cd packages/openmemory-js && npm run build build-js-sdk: ## Build JavaScript SDK only - cd sdk-js && npm run build + cd packages/openmemory-js && npm run build # Development dev: ## Start development server @echo "🚀 Starting development server..." - cd backend && npm run dev + cd packages/openmemory-js && npm run dev dev-watch: ## Start development server with file watching @echo "👀 Starting development server with watching..." 
- cd backend && npm run dev + cd packages/openmemory-js && npm run dev # Production start: ## Start production server @echo "🚀 Starting production server..." - cd backend && npm start + cd packages/openmemory-js && npm run start stop: ## Stop server (if running as daemon) @echo "🛑 Stopping server..." @@ -61,57 +56,52 @@ stop: ## Stop server (if running as daemon) # Testing test: ## Run all tests @echo "🧪 Running all tests..." - @echo "Testing backend API..." - node tests/backend/api-simple.test.js - @echo "Testing JavaScript SDK..." - node tests/js-sdk/sdk-simple.test.js + @echo "Testing JavaScript (server + SDK)..." + cd packages/openmemory-js && npx tsx tests/verify.ts @echo "Testing Python SDK..." - cd tests/py-sdk && python test-simple.py + cd packages/openmemory-py && python -m pytest -q test-backend: ## Run backend tests only - @echo "🧪 Testing backend API..." - node tests/backend/api-simple.test.js + @echo "🧪 Testing JavaScript (server + SDK)..." + cd packages/openmemory-js && npx tsx tests/verify.ts test-js-sdk: ## Run JavaScript SDK tests only - @echo "🧪 Testing JavaScript SDK..." - node tests/js-sdk/sdk-simple.test.js + @echo "🧪 Testing JavaScript (server + SDK)..." + cd packages/openmemory-js && npx tsx tests/verify.ts test-py-sdk: ## Run Python SDK tests only @echo "🧪 Testing Python SDK..." - cd tests/py-sdk && python test-simple.py + cd packages/openmemory-py && python -m pytest -q test-integration: ## Run integration tests @echo "🔗 Running integration tests..." - node tests/backend/api.test.js + cd packages/openmemory-js && npx tsx tests/test_omnibus.ts # Code Quality lint: ## Run linters @echo "🔍 Running linters..." - cd backend && npm run lint || echo "Backend linting completed" - cd sdk-js && npm run lint || echo "JS SDK linting completed" - cd sdk-py && python -m flake8 . || echo "Python linting completed" + cd packages/openmemory-js && npx prettier --check "src/**/*.ts" "tests/**/*.ts" || echo "JS formatting check completed" + cd packages/openmemory-py && python -m black --check . || echo "Python formatting check completed" format: ## Format code @echo "🎨 Formatting code..." - cd backend && npm run format || echo "Backend formatting completed" - cd sdk-js && npm run format || echo "JS SDK formatting completed" - cd sdk-py && python -m black . || echo "Python formatting completed" + cd packages/openmemory-js && npm run format || echo "JS formatting completed" + cd packages/openmemory-py && python -m black . || echo "Python formatting completed" type-check: ## Run type checking @echo "🏷️ Running type checks..." - cd backend && npx tsc --noEmit - cd sdk-js && npx tsc --noEmit + cd packages/openmemory-js && npx tsc --noEmit # Database db-reset: ## Reset database @echo "🗄️ Resetting database..." - rm -f backend/database/*.db + rm -f packages/openmemory-js/data/*.sqlite @echo "✅ Database reset!" db-backup: ## Backup database @echo "💾 Backing up database..." mkdir -p backups - cp backend/database/*.db backups/ || echo "No database files found" + cp packages/openmemory-js/data/*.sqlite backups/ || echo "No database files found" @echo "✅ Database backed up!" # Docker @@ -121,36 +111,33 @@ docker-build: ## Build Docker image docker-run: ## Run Docker container @echo "🐳 Running Docker container..." - docker run -p 8080:8080 openmemory + docker run -p 18080:18080 openmemory docker-dev: ## Run development environment with Docker @echo "🐳 Starting development environment..." 
- docker-compose up --build + docker compose up --build openmemory docker-stop: ## Stop Docker containers @echo "🐳 Stopping Docker containers..." - docker-compose down + docker compose down run: docker-dev ## Alias for docker-dev # Cleanup clean: ## Clean build artifacts @echo "🧹 Cleaning build artifacts..." - rm -rf backend/dist/ - rm -rf sdk-js/dist/ - rm -rf sdk-js/node_modules/.cache/ - rm -rf backend/node_modules/.cache/ + rm -rf packages/openmemory-js/dist/ + rm -rf packages/openmemory-js/node_modules/.cache/ find . -name "*.pyc" -delete find . -name "__pycache__" -type d -exec rm -rf {} + || true @echo "✅ Cleanup complete!" clean-all: clean ## Clean everything including node_modules @echo "🧹 Deep cleaning..." - rm -rf backend/node_modules/ - rm -rf sdk-js/node_modules/ - rm -rf sdk-py/build/ - rm -rf sdk-py/dist/ - rm -rf sdk-py/*.egg-info/ + rm -rf packages/openmemory-js/node_modules/ + rm -rf packages/openmemory-py/build/ + rm -rf packages/openmemory-py/dist/ + rm -rf packages/openmemory-py/*.egg-info/ @echo "✅ Deep cleanup complete!" # Examples @@ -173,4 +160,4 @@ quick-test: build test-backend ## Quick test after build @echo "⚡ Quick test complete!" full-check: clean install build lint test ## Full check before commit - @echo "✅ Full check complete - ready to commit!" \ No newline at end of file + @echo "✅ Full check complete - ready to commit!" diff --git a/README.md b/README.md index ed6a3a0b..7525725a 100644 --- a/README.md +++ b/README.md @@ -174,26 +174,97 @@ git clone https://github.com/CaviraOSS/OpenMemory.git cd OpenMemory cp .env.example .env -cd backend +cd packages/openmemory-js npm install -npm run dev # default :8080 +npm run dev # default :18080 (see OM_PORT in .env) ``` Or with Docker: ```bash -docker compose up --build -d +docker compose up --build -d openmemory ``` +Default port is `18080` (set `OM_PORT` in `.env` to override). + +Validate: + +```bash +curl http://localhost:18080/health +``` + +Stop: + +```bash +docker compose down +``` + +### Multi-project (project-scoped by default) + +Run one OpenMemory instance per project by using a separate env file + compose project name (each gets its own volume and port): + +```bash +docker compose --project-name om_projectA --env-file .env.projectA up -d --build openmemory +docker compose --project-name om_projectB --env-file .env.projectB up -d --build openmemory +``` + +Optional “team-shared” memory is just another explicit instance: + +```bash +docker compose --project-name om_team --env-file .env.team up -d --build openmemory +``` + +VS Code: keep memory per project by using workspace settings (`openmemory.backendUrl` + optional `openmemory.userId`). By default the extension now scopes `user_id` per workspace project name. + +Practical WB Repricer System setup: `docs/projects/wb-repricer.md`. + The backend exposes: -- `/api/memory/*` – memory operations +- `/memory/*` – memory operations - `/api/temporal/*` – temporal knowledge graph -- `/mcp` – MCP server -- dashboard UI +- `/api/ide/*` – IDE events + context endpoint +- `/mcp` – MCP server (HTTP transport) + +Update/delete are supported via both HTTP (`PATCH /memory/:id`, `DELETE /memory/:id`) and MCP (`openmemory_update`, `openmemory_delete`). --- +### 2.4 Integrate OpenMemory into your projects + +Recommended default: **one OpenMemory instance per project** (separate port + DB volume + API key). “Team memory” is a separate explicit instance you connect to only when needed. 
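+
+What this looks like from application code: writes go to the project instance by default, and the team instance is used only when the caller opts in explicitly. The snippet below is a minimal sketch, not a built-in helper; the `OM_PROJECT_URL` / `OM_PROJECT_KEY` / `OM_TEAM_URL` / `OM_TEAM_KEY` names are just a convention for your own app, and the HTTP call mirrors the `/memory/add` example in part C below.
+
+```ts
+// Minimal sketch: send memory writes to the project-scoped instance by default,
+// and to the team-shared instance only on explicit opt-in.
+// The env var names below are illustrative conventions, not OpenMemory settings.
+type MemoryScope = "project" | "team";
+
+export async function remember(
+  content: string,
+  userId: string,
+  scope: MemoryScope = "project",
+  metadata: Record<string, unknown> = {},
+) {
+  const url = scope === "team" ? process.env.OM_TEAM_URL : process.env.OM_PROJECT_URL;
+  const key = scope === "team" ? process.env.OM_TEAM_KEY : process.env.OM_PROJECT_KEY;
+  if (!url) throw new Error(`no OpenMemory URL configured for scope "${scope}"`);
+
+  const res = await fetch(`${url}/memory/add`, {
+    method: "POST",
+    headers: {
+      "content-type": "application/json",
+      ...(key ? { "x-api-key": key } : {}),
+    },
+    body: JSON.stringify({ content, user_id: userId, metadata }),
+  });
+  if (!res.ok) throw new Error(`memory/add failed: HTTP ${res.status}`);
+  return res.json();
+}
+```
+
+Anything that should stay project-private simply never passes `scope: "team"`.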
+ +**A) Start per-project backend** + +- Create `.env.project.` from `.env.project.example` and choose a unique `OM_PORT` / `OM_API_KEY`. +- Run (isolated volume + container names): + +```bash +docker compose --project-name om_ --env-file .env.project. up -d --build openmemory +``` + +**B) Connect IDEs (VS Code / Cursor / Claude / Codex / etc.)** + +- VS Code extension: set `openmemory.backendUrl` to your project instance (e.g. `http://localhost:`) and set `openmemory.apiKey`. +- By default, the extension scopes `user_id` per workspace project; set `openmemory.userId` if you want a stable shared identity across IDEs for the same person/agent. +- MCP clients: point to `http://localhost:/mcp` (auth via `x-api-key`). + +**C) Connect your app / agents** + +Use the SDKs for embedded memory, or call the server: + +```bash +# server mode +curl -H "x-api-key: $OM_API_KEY" -H "content-type: application/json" \ + -d '{"content":"decision: use docker per project","user_id":"agent-core"}' \ + http://localhost:/memory/add +``` + +Conventions that work well: +- `user_id`: actor identity inside the project (`alice`, `codex`, `ci`, `agent-core`). +- `metadata`: include `project`, `repo`, `branch`, `task_id`, `source` when writing memories. + +More details: `docs/multi-project.md`. + ## 3. Why OpenMemory (vs RAG, vs “just vectors”) LLMs forget everything between messages. @@ -310,7 +381,7 @@ OpenMemory ships a native MCP server, so any MCP‑aware client can treat it as ### Claude / Claude Code ```bash -claude mcp add --transport http openmemory http://localhost:8080/mcp +claude mcp add --transport http openmemory http://localhost:18080/mcp ``` ### Cursor / Windsurf @@ -322,7 +393,7 @@ claude mcp add --transport http openmemory http://localhost:8080/mcp "mcpServers": { "openmemory": { "type": "http", - "url": "http://localhost:8080/mcp" + "url": "http://localhost:18080/mcp" } } } @@ -335,6 +406,8 @@ Available tools include: - `openmemory_list` - `openmemory_get` - `openmemory_reinforce` +- `openmemory_update` +- `openmemory_delete` Your IDE assistant can query, store, list, and reinforce memories without you wiring every call manually. @@ -388,7 +461,7 @@ The `opm` CLI talks directly to the engine / server. ### Install ```bash -cd backend +cd packages/openmemory-js npm install npm link # adds `opm` to your PATH ``` @@ -534,4 +607,4 @@ Issues and PRs are welcome. ## 13. License -OpenMemory is licensed under **Apache 2.0**. See [LICENSE](LICENSE) for details. \ No newline at end of file +OpenMemory is licensed under **Apache 2.0**. See [LICENSE](LICENSE) for details. diff --git a/app.json b/app.json index 8e433654..bc7fb188 100644 --- a/app.json +++ b/app.json @@ -11,9 +11,6 @@ "NODE_ENV": { "value": "production" }, - "OM_PORT": { - "value": "8080" - }, "OM_API_KEY": { "description": "Secure API key" }, @@ -24,4 +21,4 @@ "scripts": { "postdeploy": "cd packages/openmemory-js && npm install && npm run build" } -} \ No newline at end of file +} diff --git a/apps/vscode-extension/README.md b/apps/vscode-extension/README.md index 58eb02c9..ecf7aa69 100644 --- a/apps/vscode-extension/README.md +++ b/apps/vscode-extension/README.md @@ -26,11 +26,11 @@ Backend server required. 
## Settings -- `openmemory.backendUrl`: Backend URL (default: `http://localhost:8080`) +- `openmemory.backendUrl`: Backend URL (default: `http://localhost:18080`) - `openmemory.apiKey`: API key for auth (optional) - `openmemory.useMCP`: Use MCP protocol mode (default: `false`) - connects to backend MCP server with tools: `openmemory_query`, `openmemory_store`, `openmemory_list`, `openmemory_get`, `openmemory_reinforce` -- `openmemory.mcpServerPath`: Path to backend MCP server (default: `backend/dist/ai/mcp.js`) -- `openmemory.userId`: Custom User ID (optional, defaults to auto-generated) +- `openmemory.mcpServerPath`: Path to OpenMemory MCP server (defaults to `packages/openmemory-js/dist/ai/mcp.js` in this repo; legacy `backend/dist/ai/mcp.js` is also supported) +- `openmemory.userId`: Custom User ID (optional). If unset, OpenMemory generates a stable `user_id` per workspace project (project-scoped by default). - `openmemory.projectName`: Custom Project Name (optional, defaults to workspace name) ## Commands @@ -48,7 +48,7 @@ All data stores locally. No telemetry. Open source code available for audit. ## Troubleshooting -Check backend running: `curl http://localhost:8080/health` +Check backend running: `curl http://localhost:18080/health` For issues, see [GitHub](https://github.com/CaviraOSS/OpenMemory/issues) diff --git a/apps/vscode-extension/package-lock.json b/apps/vscode-extension/package-lock.json index 9d126f33..7887253b 100644 --- a/apps/vscode-extension/package-lock.json +++ b/apps/vscode-extension/package-lock.json @@ -1,16 +1,16 @@ { "name": "openmemory-vscode", - "version": "1.0.6", + "version": "1.0.7", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "openmemory-vscode", - "version": "1.0.6", + "version": "1.0.7", "hasInstallScript": true, "devDependencies": { "@types/node": "^18.x", - "@types/vscode": "^1.105.0", + "@types/vscode": "^1.104.0", "@typescript-eslint/eslint-plugin": "^6.15.0", "@typescript-eslint/parser": "^6.15.0", "eslint": "^8.56.0", @@ -18,7 +18,7 @@ "vscode": "^1.1.37" }, "engines": { - "vscode": "^1.105.0" + "vscode": "^1.104.0" } }, "node_modules/@eslint-community/eslint-utils": { diff --git a/apps/vscode-extension/package.json b/apps/vscode-extension/package.json index a7cbb4cf..e88b8614 100644 --- a/apps/vscode-extension/package.json +++ b/apps/vscode-extension/package.json @@ -74,7 +74,7 @@ }, "openmemory.backendUrl": { "type": "string", - "default": "http://localhost:8080", + "default": "http://localhost:18080", "description": "URL of the OpenMemory backend server" }, "openmemory.apiKey": { @@ -111,7 +111,7 @@ "watch": "tsc -watch -p ./", "pretest": "npm run compile", "lint": "eslint src --ext ts", - "postinstall": "node postinstall.js" + "postinstall": "npm run compile && node postinstall.js" }, "dependencies": {}, "devDependencies": { @@ -123,4 +123,4 @@ "typescript": "^5.3.0", "vscode": "^1.1.37" } -} \ No newline at end of file +} diff --git a/apps/vscode-extension/postinstall.js b/apps/vscode-extension/postinstall.js index d7c1eabc..9786c99a 100644 --- a/apps/vscode-extension/postinstall.js +++ b/apps/vscode-extension/postinstall.js @@ -1,13 +1,49 @@ #!/usr/bin/env node +const tryRequire = (p) => { + try { + return require(p); + } catch { + return null; + } +}; + const { detectBackend } = require('./out/detectors/openmemory'); -const { writeMCPConfig } = require('./out/mcp/generator'); const { writeCursorConfig } = require('./out/writers/cursor'); const { writeClaudeConfig } = require('./out/writers/claude'); const { 
writeWindsurfConfig } = require('./out/writers/windsurf'); const { writeCopilotConfig } = require('./out/writers/copilot'); const { writeCodexConfig } = require('./out/writers/codex'); -const DEFAULT_URL = 'http://localhost:8080'; +// Optional: not all builds ship a standalone MCP config generator. +const mcpGenerator = tryRequire('./out/mcp/generator'); + +const DEFAULT_URL = 'http://localhost:18080'; + +function readApiKeyFromEnvFile() { + const envCandidates = [ + process.env.OPENMEMORY_API_KEY, + process.env.OM_API_KEY, + ].filter(Boolean); + if (envCandidates.length) return envCandidates[0]; + + // Dev/monorepo installs: try to read the repo root `.env`. + try { + const fs = require('fs'); + const path = require('path'); + const rootEnv = path.resolve(__dirname, '..', '..', '.env'); + if (!fs.existsSync(rootEnv)) return undefined; + + const raw = fs.readFileSync(rootEnv, 'utf8'); + const m = raw.match(/^\s*OM_API_KEY\s*=\s*(.*)\s*$/m); + if (!m) return undefined; + + const val = (m[1] || '').trim(); + if (!val || val === 'your-secret-api-key-here') return undefined; + return val; + } catch { + return undefined; + } +} async function postInstall() { console.log('🧠 OpenMemory IDE Extension - Auto-Setup'); @@ -21,22 +57,26 @@ async function postInstall() { console.log('\nAuto-linking AI tools...'); try { - const mcpPath = await writeMCPConfig(DEFAULT_URL); - console.log(` ✓ MCP config: ${mcpPath}`); + const apiKey = readApiKeyFromEnvFile(); + + if (mcpGenerator?.writeMCPConfig) { + const mcpPath = await mcpGenerator.writeMCPConfig(DEFAULT_URL, apiKey); + console.log(` ✓ MCP config: ${mcpPath}`); + } - const cursorPath = await writeCursorConfig(DEFAULT_URL); + const cursorPath = await writeCursorConfig(DEFAULT_URL, apiKey); console.log(` ✓ Cursor config: ${cursorPath}`); - const claudePath = await writeClaudeConfig(DEFAULT_URL); + const claudePath = await writeClaudeConfig(DEFAULT_URL, apiKey); console.log(` ✓ Claude config: ${claudePath}`); - const windsurfPath = await writeWindsurfConfig(DEFAULT_URL); + const windsurfPath = await writeWindsurfConfig(DEFAULT_URL, apiKey); console.log(` ✓ Windsurf config: ${windsurfPath}`); - const copilotPath = await writeCopilotConfig(DEFAULT_URL); + const copilotPath = await writeCopilotConfig(DEFAULT_URL, apiKey); console.log(` ✓ GitHub Copilot config: ${copilotPath}`); - const codexPath = await writeCodexConfig(DEFAULT_URL); + const codexPath = await writeCodexConfig(DEFAULT_URL, apiKey); console.log(` ✓ Codex config: ${codexPath}`); console.log( '\n🎉 Setup complete! 
All AI tools can now access OpenMemory.', @@ -56,7 +96,7 @@ async function postInstall() { } else { console.log('⚠️ Backend not detected at', DEFAULT_URL); console.log('\nTo start the backend:'); - console.log(' cd backend && npm start'); + console.log(' cd packages/openmemory-js && npm run dev'); console.log( '\nAuto-link will run automatically when you activate the extension.', ); diff --git a/apps/vscode-extension/src/extension.ts b/apps/vscode-extension/src/extension.ts index eb9843d7..a14f57a5 100644 --- a/apps/vscode-extension/src/extension.ts +++ b/apps/vscode-extension/src/extension.ts @@ -1,4 +1,5 @@ import * as vscode from 'vscode'; +import * as crypto from 'crypto'; import { shouldSkipEvent, getSectorFilter } from './hooks/ideEvents'; import { writeCursorConfig } from './writers/cursor'; import { writeClaudeConfig } from './writers/claude'; @@ -9,7 +10,7 @@ import { DashboardPanel } from './panels/DashboardPanel'; import { generateDiff } from './utils/diff'; let session_id: string | null = null; -let backend_url = 'http://localhost:8080'; +let backend_url = 'http://localhost:18080'; let api_key: string | undefined = undefined; let status_bar: vscode.StatusBarItem; let is_tracking = false; @@ -23,7 +24,7 @@ const fileCache = new Map(); export function activate(context: vscode.ExtensionContext) { const config = vscode.workspace.getConfiguration('openmemory'); is_enabled = config.get('enabled') ?? true; - backend_url = config.get('backendUrl') || 'http://localhost:8080'; + backend_url = config.get('backendUrl') || 'http://localhost:18080'; api_key = config.get('apiKey') || undefined; use_mcp = config.get('useMCP') || false; mcp_server_path = config.get('mcpServerPath') || ''; @@ -334,7 +335,7 @@ async function show_quick_setup() { } break; case 'url': - const url = await vscode.window.showInputBox({ prompt: 'Enter backend URL', value: backend_url, placeHolder: 'http://localhost:8080' }); + const url = await vscode.window.showInputBox({ prompt: 'Enter backend URL', value: backend_url, placeHolder: 'http://localhost:18080' }); if (url) { const config = vscode.workspace.getConfiguration('openmemory'); await config.update('backendUrl', url, vscode.ConfigurationTarget.Global); @@ -359,25 +360,36 @@ async function show_quick_setup() { } } -function getUserId(context: vscode.ExtensionContext, config: vscode.WorkspaceConfiguration): string { - // 1. Check if user has configured a custom userId - const configuredUserId = config.get('userId'); - if (configuredUserId) return configuredUserId; + function getUserId(context: vscode.ExtensionContext, config: vscode.WorkspaceConfiguration): string { + // 1. Check if user has configured a custom userId + const configuredUserId = config.get('userId'); + if (configuredUserId) return configuredUserId; - // 2. Check if we have a persistent userId in global state - let persistedUserId = context.globalState.get('openmemory.userId'); - if (persistedUserId) return persistedUserId; + // 2. Project-scoped by default (separate memory per project/workspace) + const configuredProject = config.get('projectName'); + const project = configuredProject || vscode.workspace.workspaceFolders?.[0]?.name || 'unknown'; + const projectHash = crypto.createHash('sha1').update(project).digest('hex').slice(0, 10); + const projectUserIdKey = `openmemory.userId.${projectHash}`; - // 3. 
Generate a new unique userId based on machine ID - const machineId = vscode.env.machineId; // Unique per machine - const userName = process.env.USERNAME || process.env.USER || 'user'; - persistedUserId = `${userName}-${machineId.substring(0, 8)}`; + let persistedUserId = context.globalState.get(projectUserIdKey); + if (persistedUserId) return persistedUserId; - // 4. Persist it for future sessions - context.globalState.update('openmemory.userId', persistedUserId); + // 3. Generate a new unique userId based on machine ID + const machineId = vscode.env.machineId; // Unique per machine + const userName = process.env.USERNAME || process.env.USER || 'user'; + const projectSlug = project + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/^-+|-+$/g, '') + .slice(0, 32) || 'project'; - return persistedUserId; -} + persistedUserId = `${userName}-${machineId.substring(0, 8)}:${projectSlug}`; + + // 4. Persist it for future sessions + context.globalState.update(projectUserIdKey, persistedUserId); + + return persistedUserId; + } async function check_connection(): Promise { try { @@ -428,7 +440,7 @@ async function send_event(event_data: { event_type: string; file_path: string; l } async function query_context(query: string, file: string) { - const response = await fetch(`${backend_url}/api/ide/context`, { method: 'POST', headers: get_headers(), body: JSON.stringify({ query, session_id, file_path: file, limit: 10 }) }); + const response = await fetch(`${backend_url}/api/ide/context`, { method: 'POST', headers: get_headers(), body: JSON.stringify({ query, session_id, user_id, file_path: file, limit: 10 }) }); const data = await response.json(); return data.memories || []; } diff --git a/apps/vscode-extension/src/utils/mcpPath.ts b/apps/vscode-extension/src/utils/mcpPath.ts new file mode 100644 index 00000000..495176e9 --- /dev/null +++ b/apps/vscode-extension/src/utils/mcpPath.ts @@ -0,0 +1,21 @@ +import * as fs from "fs"; +import * as path from "path"; + +export function resolveOpenMemoryMcpServerPath(explicitPath?: string): string { + if (explicitPath && explicitPath.trim()) return explicitPath; + + const cwd = process.cwd(); + const candidates = [ + path.join(cwd, "packages", "openmemory-js", "dist", "ai", "mcp.js"), + path.join(cwd, "backend", "dist", "ai", "mcp.js"), // legacy layout + path.join(cwd, "sdk-js", "dist", "ai", "mcp.js"), // legacy layout + ]; + + for (const candidate of candidates) { + if (fs.existsSync(candidate)) return candidate; + } + + // Prefer the current monorepo layout as the default. 
+ return candidates[0]; +} + diff --git a/apps/vscode-extension/src/writers/claude.ts b/apps/vscode-extension/src/writers/claude.ts index 2f79ca2f..056280ad 100644 --- a/apps/vscode-extension/src/writers/claude.ts +++ b/apps/vscode-extension/src/writers/claude.ts @@ -1,6 +1,7 @@ import * as fs from 'fs'; import * as path from 'path'; import * as os from 'os'; +import { resolveOpenMemoryMcpServerPath } from '../utils/mcpPath'; export interface ClaudeConfig { mcpServers?: { @@ -17,7 +18,7 @@ export interface ClaudeConfig { export function generateClaudeConfig(backendUrl: string, apiKey?: string, useMCP = false, mcpServerPath?: string): ClaudeConfig { if (useMCP) { - const backendMcpPath = mcpServerPath || path.join(process.cwd(), 'backend', 'dist', 'ai', 'mcp.js'); + const backendMcpPath = resolveOpenMemoryMcpServerPath(mcpServerPath); return { mcpServers: { openmemory: { diff --git a/apps/vscode-extension/src/writers/codex.ts b/apps/vscode-extension/src/writers/codex.ts index 8a1df989..710c4474 100644 --- a/apps/vscode-extension/src/writers/codex.ts +++ b/apps/vscode-extension/src/writers/codex.ts @@ -1,6 +1,7 @@ import * as fs from 'fs'; import * as path from 'path'; import * as os from 'os'; +import { resolveOpenMemoryMcpServerPath } from '../utils/mcpPath'; export interface CodexConfig { contextProviders?: { @@ -23,7 +24,7 @@ export interface CodexConfig { export function generateCodexConfig(backendUrl: string, apiKey?: string, useMCP = false, mcpServerPath?: string): CodexConfig { if (useMCP) { - const backendMcpPath = mcpServerPath || path.join(process.cwd(), 'backend', 'dist', 'ai', 'mcp.js'); + const backendMcpPath = resolveOpenMemoryMcpServerPath(mcpServerPath); const config: CodexConfig = { mcpServers: { openmemory: { diff --git a/apps/vscode-extension/src/writers/copilot.ts b/apps/vscode-extension/src/writers/copilot.ts index c8a8dc83..057df375 100644 --- a/apps/vscode-extension/src/writers/copilot.ts +++ b/apps/vscode-extension/src/writers/copilot.ts @@ -1,6 +1,7 @@ import * as fs from 'fs'; import * as path from 'path'; import * as os from 'os'; +import { resolveOpenMemoryMcpServerPath } from '../utils/mcpPath'; export interface CopilotConfig { name: string; @@ -19,7 +20,7 @@ export interface CopilotConfig { export function generateCopilotConfig(backendUrl: string, apiKey?: string, useMCP = false, mcpServerPath?: string): CopilotConfig { if (useMCP) { - const backendMcpPath = mcpServerPath || path.join(process.cwd(), 'backend', 'dist', 'ai', 'mcp.js'); + const backendMcpPath = resolveOpenMemoryMcpServerPath(mcpServerPath); const config: CopilotConfig = { name: 'OpenMemory', type: 'mcp', diff --git a/apps/vscode-extension/src/writers/cursor.ts b/apps/vscode-extension/src/writers/cursor.ts index 477336f3..5892353e 100644 --- a/apps/vscode-extension/src/writers/cursor.ts +++ b/apps/vscode-extension/src/writers/cursor.ts @@ -1,6 +1,7 @@ import * as fs from 'fs'; import * as path from 'path'; import * as os from 'os'; +import { resolveOpenMemoryMcpServerPath } from '../utils/mcpPath'; export interface CursorConfig { name: string; @@ -17,7 +18,7 @@ export interface CursorConfig { export function generateCursorConfig(backendUrl: string, apiKey?: string, useMCP = false, mcpServerPath?: string): CursorConfig { if (useMCP) { - const backendMcpPath = mcpServerPath || path.join(process.cwd(), 'backend', 'dist', 'ai', 'mcp.js'); + const backendMcpPath = resolveOpenMemoryMcpServerPath(mcpServerPath); return { name: 'OpenMemory', type: 'mcp', diff --git 
a/apps/vscode-extension/src/writers/windsurf.ts b/apps/vscode-extension/src/writers/windsurf.ts index 72179b06..34286605 100644 --- a/apps/vscode-extension/src/writers/windsurf.ts +++ b/apps/vscode-extension/src/writers/windsurf.ts @@ -1,6 +1,7 @@ import * as fs from 'fs'; import * as path from 'path'; import * as os from 'os'; +import { resolveOpenMemoryMcpServerPath } from '../utils/mcpPath'; export interface WindsurfConfig { contextProvider?: string; @@ -13,7 +14,7 @@ export interface WindsurfConfig { export function generateWindsurfConfig(backendUrl: string, apiKey?: string, useMCP = false, mcpServerPath?: string): WindsurfConfig { if (useMCP) { - const backendMcpPath = mcpServerPath || path.join(process.cwd(), 'backend', 'dist', 'ai', 'mcp.js'); + const backendMcpPath = resolveOpenMemoryMcpServerPath(mcpServerPath); return { contextProvider: 'openmemory-mcp', mcp: { diff --git a/docker-compose.yml b/docker-compose.yml index 49a23281..fce0139f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,15 +1,13 @@ -version: '3.8' - services: openmemory: build: context: ./packages/openmemory-js dockerfile: Dockerfile ports: - - '8080:8080' + - '${OM_PORT:-18080}:${OM_PORT:-18080}' environment: # Core Configuration - - OM_PORT=${OM_PORT:-8080} + - OM_PORT=${OM_PORT:-18080} - OM_MODE=${OM_MODE:-standard} - OM_TIER=${OM_TIER:-hybrid} - OM_DB_PATH=${OM_DB_PATH:-/data/openmemory.sqlite} @@ -122,7 +120,7 @@ services: - openmemory_data:/data restart: unless-stopped healthcheck: - test: ['CMD', 'wget', '--no-verbose', '--tries=1', '--spider', 'http://localhost:8080/health'] + test: ['CMD-SHELL', 'wget --no-verbose --tries=1 --spider "http://localhost:${OM_PORT:-18080}/health"'] interval: 30s timeout: 10s retries: 3 @@ -130,13 +128,14 @@ services: # Dashboard: Web UI for visualizing and managing memories dashboard: + profiles: ["dashboard"] build: context: ./dashboard dockerfile: Dockerfile ports: - '3000:3000' environment: - - NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL:-http://localhost:8080} + - NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL:-http://localhost:${OM_PORT:-18080}} - NEXT_PUBLIC_API_KEY=${NEXT_PUBLIC_API_KEY:-} depends_on: openmemory: diff --git a/docs/api-server.md b/docs/api-server.md index 828e0eab..04efde9e 100644 --- a/docs/api-server.md +++ b/docs/api-server.md @@ -2,7 +2,7 @@ OpenMemory exposes a REST API for language-agnostic integration. -**Base URL**: `http://localhost:8080` (default) +**Base URL**: `http://localhost:18080` (default) ## Endpoints @@ -19,7 +19,7 @@ Add a new memory. } ``` -### `POST /memory/search` +### `POST /memory/query` Search for memories. @@ -27,15 +27,15 @@ Search for memories. ```json { "query": "What is the pet name?", - "user_id": "user_123", - "limit": 3 + "k": 3, + "filters": { "user_id": "user_123" } } ``` **Response:** ```json { - "memories": [ + "matches": [ { "id": "mem_abc123", "content": "My cat's name is Luna", @@ -45,6 +45,36 @@ Search for memories. } ``` +### `PATCH /memory/:id` + +Update an existing memory. If `content` changes, embeddings are recomputed. + +**Headers:** +- `x-api-key: ` (required if auth is enabled) + +**Body:** +```json +{ + "content": "Updated content (optional)", + "tags": ["tag1", "tag2"], + "metadata": { "source": "manual" }, + "user_id": "user_123" +} +``` + +### `DELETE /memory/:id` + +Delete a memory by id (also removes vectors and waypoint links). 
+ +**Headers:** +- `x-api-key: ` (required if auth is enabled) + +**Example:** +```bash +curl -X DELETE "http://localhost:18080/memory/mem_abc123?user_id=user_123" \ + -H "x-api-key: " +``` + ### `GET /health` Returns `200 OK` if the system is running. @@ -56,7 +86,7 @@ You can run the server using Docker or the Node CLI. ### Docker ```bash -docker run -p 8080:8080 openmemory/server +docker run -e OM_PORT=18080 -p 18080:18080 openmemory/server ``` ### CLI diff --git a/docs/mcp.md b/docs/mcp.md index aa02df08..2d75130e 100644 --- a/docs/mcp.md +++ b/docs/mcp.md @@ -243,6 +243,52 @@ Fetch a single memory by ID. } ``` +### openmemory_update + +Update a memory (content, tags, metadata). + +#### Parameters + +| Parameter | Type | Required | Default | Description | +|-----------|------|----------|---------|-------------| +| `id` | string | Yes | - | Memory identifier | +| `content` | string | No | - | New memory content | +| `tags` | string[] | No | - | Replace tags | +| `metadata` | object | No | - | Replace metadata | +| `user_id` | string | No | - | User identifier | + +#### Example + +```json +{ + "id": "mem_abc123", + "content": "Updated preference: user prefers email reports", + "tags": ["preference", "reporting"], + "metadata": { "source": "manual" }, + "user_id": "user_123" +} +``` + +### openmemory_delete + +Delete a memory by ID. + +#### Parameters + +| Parameter | Type | Required | Default | Description | +|-----------|------|----------|---------|-------------| +| `id` | string | Yes | - | Memory identifier | +| `user_id` | string | No | - | User identifier | + +#### Example + +```json +{ + "id": "mem_abc123", + "user_id": "user_123" +} +``` + ### openmemory_reinforce Boost salience of a memory. @@ -524,6 +570,29 @@ const result = await client.callTool({ // Returns: Excel ``` +### Update / Delete (Contextual Memory) + +Use these for cleanup, corrections, and redactions: + +```json +// Update an existing memory +{ + "id": "mem_abc123", + "content": "Corrected content", + "tags": ["corrected"], + "metadata": { "source": "cleanup" }, + "user_id": "user_123" +} +``` + +```json +// Delete an old / incorrect memory +{ + "id": "mem_abc123", + "user_id": "user_123" +} +``` + ### Historical Analysis ```json diff --git a/docs/multi-project.md b/docs/multi-project.md new file mode 100644 index 00000000..a702f260 --- /dev/null +++ b/docs/multi-project.md @@ -0,0 +1,43 @@ +# Multi-project Memory (Project by Default, Team by Explicit Opt-in) + +OpenMemory supports two safe patterns: + +## Option A (recommended): one instance per project + +This gives hard isolation by default (different DB volume + port + API key). + +1) Create an env file per project (examples: `.env.project.example`, `.env.team.example`). + +2) Start a project instance: + +```bash +docker compose --project-name om_ --env-file .env.project. up -d --build openmemory +``` + +3) Validate: + +```bash +curl http://localhost:/health +``` + +4) Stop: + +```bash +docker compose --project-name om_ down +``` + +### Team shared memory (explicit) + +Run a separate, clearly named instance and connect to it only when needed: + +```bash +docker compose --project-name om_team --env-file .env.team up -d --build openmemory +``` + +## Option B: one shared instance, but strict scoping via `user_id` + +This works only if your clients always send `user_id` (or `x-om-user-id` / `x-openmemory-user-id`). + +- VS Code extension: default `user_id` is now scoped per workspace project name; override with `openmemory.userId` if you want a shared team identity. 
+- HTTP context providers: include `user_id` in body for `/api/ide/context`, or set `x-om-user-id` header. + diff --git a/docs/node-sdk.md b/docs/node-sdk.md index 5520c59d..4ce2b49d 100644 --- a/docs/node-sdk.md +++ b/docs/node-sdk.md @@ -52,7 +52,7 @@ console.log(results[0].content); The Node package also contains the API server. ```bash -# Start the server on port 8080 +# Start the server (default port: 18080) npx openmemory-js serve # or npx opm serve diff --git a/docs/projects/wb-repricer.md b/docs/projects/wb-repricer.md new file mode 100644 index 00000000..89f2331c --- /dev/null +++ b/docs/projects/wb-repricer.md @@ -0,0 +1,111 @@ +# WB Repricer System — project-specific memory integration + +The WB Repricer System gets its own OpenMemory instance so every agent, IDE, test run, and CI pipeline has a private, project-scoped memory. Follow the three phases below. + +## 1. Start the project backend + +1. Copy `.env.project.example` to `.env.project.wb-repricer`. +2. Choose a unique port (e.g. `18090`) and a strong API key: + + ```bash + OM_PORT=18090 + OM_API_KEY=$(openssl rand -base64 32) + ``` + +3. Double-check the other values (`OM_METADATA_BACKEND=sqlite`, `OM_DB_PATH=/data/openmemory.sqlite`, etc.) so the DB lives inside the project container. +4. Run the project-specific instance: + + ```bash + docker compose \ + --project-name om_wb-repricer \ + --env-file .env.project.wb-repricer \ + up -d --build openmemory + ``` + +5. Validate with `curl http://localhost:18090/health` and stop with: + + ```bash + docker compose --project-name om_wb-repricer down + ``` + + +## 2. Wire the WB Repricer repo (backend + CI) + +Every process that should share the WB Repricer memory needs three settings: + + - `OPENMEMORY_URL=http://localhost:18090` + - `OPENMEMORY_API_KEY=` + - `OPENMEMORY_USER_ID=` (e.g. `repricer-ci`, `repricer-backend`, `repricer-logger`) + +Place them inside the repo’s env/config (e.g. `.env.memory`, `.github/workflows/memory.env`, or a secrets manager). Example `.env.memory`: + +```dotenv +OPENMEMORY_URL=http://localhost:18090 +OPENMEMORY_API_KEY=REPLACE_WITH_YOUR_SECRET +WB_REPRICER_MEMORY_USER=repricer-backend +``` + +Use that env file any time you start the service or run agents. For example: + +```bash +source .env.memory +OPENMEMORY_USER_ID=${WB_REPRICER_MEMORY_USER} python app.py +``` + + +## 3. Instructions for “Е агент” (E agents developer) + +1. **Clone + configure the repo** + - Use the `.env.memory` template above (or add the vars to your service manifest). + - Make sure `OPENMEMORY_URL`/`OM_PORT` point to the project instance (`18090`). + - Pass `OPENMEMORY_API_KEY` and a human-readable `OPENMEMORY_USER_ID` (`agent-core`, `e-agent`, etc.). + +2. **Adapt the code** + - For **Python** services or agents: + + ```python + import os + from openmemory.client import Memory + + mem = Memory() + mem.add( + "Agent noted repricer tweak", + user_id=os.environ.get("OPENMEMORY_USER_ID"), + metadata={ + "project": "WB Repricer System", + "repo": "repricer", + "branch": os.getenv("GIT_BRANCH"), + "source": "agent-core", + }, + ) + ``` + + - For **Node/JS** services: + + ```ts + import { Memory } from "openmemory-js"; + + const mem = new Memory(); + await mem.add("Adjusted repricer rates", { + user_id: process.env.OPENMEMORY_USER_ID, + metadata: { project: "WB Repricer System", source: "e-agent" }, + }); + ``` + +3. **Hook IDE + MCP tools** + - Set the VS Code extension to `http://localhost:18090` and the same API key. 
+   - When the agent starts, it sends IDE events with `user_id` scoped to WB Repricer (the extension now generates one per workspace / project name).
+   - For MCP clients (Claude/Cursor/Codex), update their config to `http://localhost:18090/mcp` and supply the API key in `x-api-key`.
+
+4. **Working with multiple agents/projects**
+   - Each agent uses `user_id` to identify its persona (`repricer-analyst`, `repricer-ci`, `repricer-agent`).
+   - Add `metadata.project` and `metadata.source` so queries can filter results per tool/branch.
+   - When the project is done, clean up via `docker compose --project-name om_wb-repricer down` (keeps memory isolated from other work).
+
+## 4. Troubleshooting
+
+- If you see `401`/`authentication_required`, confirm the API key (`x-api-key`) matches `.env.project.wb-repricer`.
+- If duplicate ports clash, bump `OM_PORT` and update `OPENMEMORY_URL` accordingly (e.g. `http://localhost:18091`).
+- When switching between projects, point each IDE/agent at the matching `backendUrl` and `user_id` to avoid leaking context.
+
+More general patterns are documented in `README.md` and `docs/multi-project.md`. Use this page as your “WB Repricer System” playbook.
diff --git a/packages/openmemory-js/Dockerfile b/packages/openmemory-js/Dockerfile
index fac087d3..7060bbf1 100644
--- a/packages/openmemory-js/Dockerfile
+++ b/packages/openmemory-js/Dockerfile
@@ -58,12 +58,12 @@ RUN mkdir -p /data \
 # Switch to non-root user
 USER appuser

-# Expose the application port
-EXPOSE 8080
+# Expose the default application port (override with OM_PORT)
+EXPOSE 18080

 # Define a lightweight health check that verifies the /health endpoint
 HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
-    CMD node -e "require('http').get('http://localhost:8080/health', (res) => process.exit(res.statusCode === 200 ?
0 : 1)).on('error', () => process.exit(1))" + CMD node -e "const port=process.env.OM_PORT||18080;require('http').get(`http://localhost:${port}/health`,(res)=>process.exit(res.statusCode===200?0:1)).on('error',()=>process.exit(1))" # Start the application using npm -ENTRYPOINT ["npm", "start"] \ No newline at end of file +ENTRYPOINT ["npm", "start"] diff --git a/packages/openmemory-js/README.md b/packages/openmemory-js/README.md index b57f4f4b..fc90fcac 100644 --- a/packages/openmemory-js/README.md +++ b/packages/openmemory-js/README.md @@ -190,6 +190,8 @@ available mcp tools: - `openmemory_list` - list all memories - `openmemory_get` - get memory by id - `openmemory_reinforce` - reinforce a memory +- `openmemory_update` - update memory content/tags/metadata +- `openmemory_delete` - delete a memory --- diff --git a/packages/openmemory-js/bin/opm.js b/packages/openmemory-js/bin/opm.js index bf5e82eb..36d82039 100644 --- a/packages/openmemory-js/bin/opm.js +++ b/packages/openmemory-js/bin/opm.js @@ -19,7 +19,7 @@ const loadenv = () => { }; loadenv(); -const port = process.env.OM_PORT || '8080'; +const port = process.env.OM_PORT || '18080'; const url = process.env.OPENMEMORY_URL || `http://localhost:${port}`; const key = process.env.OPENMEMORY_API_KEY || process.env.OM_API_KEY || ''; @@ -46,7 +46,7 @@ options: -h, --help show help env vars: - OPENMEMORY_URL api url (default: http://localhost:8080) + OPENMEMORY_URL api url (default: http://localhost:18080) OPENMEMORY_API_KEY auth key OM_API_KEY alt auth key diff --git a/packages/openmemory-js/src/ai/mcp.ts b/packages/openmemory-js/src/ai/mcp.ts index 7f247c07..13a5fa16 100644 --- a/packages/openmemory-js/src/ai/mcp.ts +++ b/packages/openmemory-js/src/ai/mcp.ts @@ -9,6 +9,7 @@ import { hsg_query, reinforce_memory, sector_configs, + update_memory, } from "../memory/hsg"; import { q, all_async, memories_table, vector_store } from "../core/db"; import { getEmbeddingInfo } from "../memory/embed"; @@ -46,14 +47,19 @@ const fmt_matches = (matches: Awaited>) => }) .join("\n\n"); -const set_hdrs = (res: ServerResponse) => { - res.setHeader("Content-Type", "application/json"); +const set_cors_hdrs = (res: ServerResponse) => { res.setHeader("Access-Control-Allow-Origin", "*"); - res.setHeader("Access-Control-Allow-Methods", "POST,OPTIONS"); + res.setHeader("Access-Control-Allow-Methods", "GET,POST,DELETE,OPTIONS"); res.setHeader( "Access-Control-Allow-Headers", - "Content-Type,Authorization,Mcp-Session-Id", + "Content-Type,Authorization,x-api-key,mcp-session-id,mcp-protocol-version,last-event-id", ); + res.setHeader("Access-Control-Expose-Headers", "mcp-session-id"); +}; + +const set_json_hdrs = (res: ServerResponse) => { + res.setHeader("Content-Type", "application/json"); + set_cors_hdrs(res); }; const send_err = ( @@ -65,7 +71,7 @@ const send_err = ( ) => { if (!res.headersSent) { res.statusCode = status; - set_hdrs(res); + set_json_hdrs(res); res.end( JSON.stringify({ jsonrpc: "2.0", @@ -77,6 +83,11 @@ const send_err = ( }; const uid = (val?: string | null) => (val?.trim() ? val.trim() : undefined); +const default_user_id = uid( + process.env.OM_DEFAULT_USER_ID || + process.env.OPENMEMORY_DEFAULT_USER_ID || + process.env.OPENMEMORY_USER_ID, +); export const create_mcp_srv = () => { const srv = new McpServer( @@ -160,7 +171,7 @@ export const create_mcp_srv = () => { min_salience, user_id, }) => { - const u = uid(user_id); + const u = uid(user_id) ?? default_user_id; const results: any = { type, query }; const at_date = at ? 
new Date(at) : new Date(); @@ -316,7 +327,7 @@ export const create_mcp_srv = () => { ), }, async ({ content, type = "contextual", facts, tags, metadata, user_id }) => { - const u = uid(user_id); + const u = uid(user_id) ?? default_user_id; const results: any = { type }; // Validate facts are provided when needed @@ -450,7 +461,7 @@ export const create_mcp_srv = () => { .describe("Restrict results to a specific user identifier"), }, async ({ limit, sector, user_id }) => { - const u = uid(user_id); + const u = uid(user_id) ?? default_user_id; let rows: mem_row[]; if (u) { const all = await q.all_mem_by_user.all(u, limit ?? 10, 0); @@ -502,7 +513,7 @@ export const create_mcp_srv = () => { ), }, async ({ id, include_vectors, user_id }) => { - const u = uid(user_id); + const u = uid(user_id) ?? default_user_id; const mem = await q.get_mem.get(id); if (!mem) return { @@ -544,6 +555,134 @@ export const create_mcp_srv = () => { }, ); + srv.tool( + "openmemory_update", + "Update an existing memory (content, tags, metadata)", + { + id: z.string().min(1).describe("Memory identifier to update"), + content: z + .string() + .min(1) + .optional() + .describe("New memory content (omit to keep current)"), + tags: z + .array(z.string()) + .optional() + .describe("Replace tags (omit to keep current)"), + metadata: z + .record(z.any()) + .optional() + .describe("Replace metadata (omit to keep current)"), + user_id: z + .string() + .trim() + .min(1) + .optional() + .describe("Validate ownership against a specific user identifier"), + }, + async ({ id, content, tags, metadata, user_id }) => { + if ( + content === undefined && + tags === undefined && + metadata === undefined + ) { + throw new Error("No updates provided"); + } + const u = uid(user_id) ?? default_user_id; + const mem = await q.get_mem.get(id); + if (!mem) + return { + content: [ + { type: "text", text: `Memory ${id} not found.` }, + ], + }; + if (u && mem.user_id !== u) + return { + content: [ + { + type: "text", + text: `Memory ${id} not found for user ${u}.`, + }, + ], + }; + + const res = await update_memory(id, content, tags, metadata); + const owner = mem.user_id || u; + if (owner) { + update_user_summary(owner).catch((err) => + console.error("[MCP] user summary update failed:", err), + ); + } + return { + content: [ + { + type: "text", + text: `Updated memory ${id}`, + }, + { + type: "text", + text: JSON.stringify( + { + id, + updated: res.updated, + user_id: owner ?? null, + }, + null, + 2, + ), + }, + ], + }; + }, + ); + + srv.tool( + "openmemory_delete", + "Delete a memory by identifier", + { + id: z.string().min(1).describe("Memory identifier to delete"), + user_id: z + .string() + .trim() + .min(1) + .optional() + .describe("Validate ownership against a specific user identifier"), + }, + async ({ id, user_id }) => { + const u = uid(user_id) ?? 
default_user_id; + const mem = await q.get_mem.get(id); + if (!mem) + return { + content: [ + { type: "text", text: `Memory ${id} not found.` }, + ], + }; + if (u && mem.user_id !== u) + return { + content: [ + { + type: "text", + text: `Memory ${id} not found for user ${u}.`, + }, + ], + }; + await q.del_mem.run(id); + await vector_store.deleteVectors(id); + await q.del_waypoints.run(id, id); + const owner = mem.user_id || u; + if (owner) { + update_user_summary(owner).catch((err) => + console.error("[MCP] user summary update failed:", err), + ); + } + return { + content: [ + { type: "text", text: `Deleted memory ${id}` }, + ], + }; + }, + ); + srv.resource( "openmemory-config", "openmemory://config", @@ -568,6 +707,8 @@ export const create_mcp_srv = () => { "openmemory_reinforce", "openmemory_list", "openmemory_get", + "openmemory_update", + "openmemory_delete", ], }; return { @@ -631,14 +772,24 @@ export const mcp = (app: any) => { const handle_req = async (req: any, res: any) => { try { await srv_ready; - const pay = await extract_pay(req); - if (!pay || typeof pay !== "object") { - send_err(res, -32600, "Request body must be a JSON object"); + set_cors_hdrs(res); + + if (req.method === "POST") { + const pay = await extract_pay(req); + if (!pay || typeof pay !== "object") { + send_err(res, -32600, "Request body must be a JSON object"); + return; + } + console.error("[MCP] Incoming request:", JSON.stringify(pay)); + await trans.handleRequest(req, res, pay); return; } - console.error("[MCP] Incoming request:", JSON.stringify(pay)); - set_hdrs(res); - await trans.handleRequest(req, res, pay); + + // Streamable HTTP transport supports: + // - GET for SSE stream (optional but used by many clients) + // - DELETE to close session + // - other methods return an error per transport + await trans.handleRequest(req, res, undefined); } catch (error) { console.error("[MCP] Error handling request:", error); if (error instanceof SyntaxError) { @@ -656,27 +807,21 @@ export const mcp = (app: any) => { } }; - app.post("/mcp", (req: any, res: any) => { + app.all("/mcp", (req: any, res: any) => { + if (req.method === "OPTIONS") { + res.statusCode = 204; + set_cors_hdrs(res); + res.end(); + return; + } void handle_req(req, res); }); + app.options("/mcp", (_req: any, res: any) => { res.statusCode = 204; - set_hdrs(res); + set_cors_hdrs(res); res.end(); }); - - const method_not_allowed = (_req: IncomingMessage, res: ServerResponse) => { - send_err( - res, - -32600, - "Method not supported. 
Use POST /mcp with JSON payload.", - null, - 405, - ); - }; - app.get("/mcp", method_not_allowed); - app.delete("/mcp", method_not_allowed); - app.put("/mcp", method_not_allowed); }; export const start_mcp_stdio = async () => { diff --git a/packages/openmemory-js/src/core/cfg.ts b/packages/openmemory-js/src/core/cfg.ts index b22cdab0..ae868543 100644 --- a/packages/openmemory-js/src/core/cfg.ts +++ b/packages/openmemory-js/src/core/cfg.ts @@ -16,12 +16,16 @@ const get_tier = (): tier => { return "hybrid"; }; export const tier = get_tier(); -const tier_dims = { fast: 1536, smart: 1536, deep: 1536, hybrid: 1536 }; +// Defaults mirror `.env.example` and docs: +// - fast/hybrid: 256-dim synthetic +// - smart: 256-dim synthetic + 128-dim compressed semantic => 384 +// - deep: full semantic embeddings (typically 1536) +const tier_dims = { fast: 256, smart: 384, deep: 1536, hybrid: 256 }; const tier_cache = { fast: 2, smart: 3, deep: 5, hybrid: 3 }; const tier_max_active = { fast: 32, smart: 64, deep: 128, hybrid: 64 }; export const env = { - port: num(process.env.OM_PORT, 8080), + port: num(process.env.OM_PORT ?? process.env.PORT, 18080), db_path: str( process.env.OM_DB_PATH, path.resolve(__dirname, "../../data/openmemory.sqlite"), @@ -76,7 +80,11 @@ export const env = { process.env.OM_METADATA_BACKEND, "sqlite", ).toLowerCase(), - vector_backend: str(process.env.OM_VECTOR_BACKEND, "postgres").toLowerCase(), + // By default, vectors follow the metadata backend unless explicitly overridden (e.g. "valkey"). + vector_backend: str( + process.env.OM_VECTOR_BACKEND, + process.env.OM_METADATA_BACKEND || "sqlite", + ).toLowerCase(), valkey_host: str(process.env.OM_VALKEY_HOST, "localhost"), valkey_port: num(process.env.OM_VALKEY_PORT, 6379), valkey_password: process.env.OM_VALKEY_PASSWORD, diff --git a/packages/openmemory-js/src/server/index.ts b/packages/openmemory-js/src/server/index.ts index 4f5d34f5..13dcb4b8 100644 --- a/packages/openmemory-js/src/server/index.ts +++ b/packages/openmemory-js/src/server/index.ts @@ -48,7 +48,7 @@ app.use((req: any, res: any, next: any) => { ); res.setHeader( "Access-Control-Allow-Headers", - "Content-Type,Authorization,x-api-key", + "Content-Type,Authorization,x-api-key,x-om-user-id,x-openmemory-user-id", ); if (req.method === "OPTIONS") { res.status(200).end(); diff --git a/packages/openmemory-js/src/server/routes/ide.ts b/packages/openmemory-js/src/server/routes/ide.ts index df46b3ee..e32cedcb 100644 --- a/packages/openmemory-js/src/server/routes/ide.ts +++ b/packages/openmemory-js/src/server/routes/ide.ts @@ -4,6 +4,15 @@ import { update_user_summary } from "../../memory/user_summary"; import { j, p } from "../../utils"; import * as crypto from "crypto"; export function ide(app: any) { + const hdr_user_id = (req: any): string | undefined => { + const h = + req?.headers?.["x-om-user-id"] ?? 
+ req?.headers?.["x-openmemory-user-id"]; + if (typeof h === "string") return h; + if (Array.isArray(h) && typeof h[0] === "string") return h[0]; + return undefined; + }; + app.post("/api/ide/events", async (req: any, res: any) => { try { const event_type = req.body.event_type; @@ -11,7 +20,7 @@ export function ide(app: any) { const content = req.body.content || ""; const session_id = req.body.session_id || "default"; const metadata = req.body.metadata || {}; - const user_id = req.body.user_id || "anonymous"; + const user_id = req.body.user_id || hdr_user_id(req) || "anonymous"; if (!event_type) return res.status(400).json({ err: "event_type_required" }); @@ -72,10 +81,11 @@ export function ide(app: any) { const k = req.body.k || req.body.limit || 5; const session_id = req.body.session_id; const file_path = req.body.file_path; + const user_id = req.body.user_id || hdr_user_id(req); if (!query) return res.status(400).json({ err: "query_required" }); - const results = await hsg_query(query, k); + const results = await hsg_query(query, k, user_id ? { user_id } : undefined); let filtered = results; diff --git a/packages/openmemory-py/pyproject.toml b/packages/openmemory-py/pyproject.toml index d4589749..0639c5c4 100644 --- a/packages/openmemory-py/pyproject.toml +++ b/packages/openmemory-py/pyproject.toml @@ -12,6 +12,8 @@ dependencies = [ "pydantic", "numpy", "httpx", + "python-dotenv>=1.0", + "PyYAML>=6.0", "google-api-python-client>=2.0", "google-auth>=2.0", "notion-client>=2.0", @@ -24,5 +26,12 @@ dependencies = [ "openai>=1.0", ] +[project.optional-dependencies] +dev = [ + "pytest>=7.0", + "pytest-asyncio>=0.21", + "black>=23.0", +] + [tool.hatch.build.targets.wheel] packages = ["src/openmemory"] diff --git a/packages/openmemory-py/src/openmemory/connectors/langchain.py b/packages/openmemory-py/src/openmemory/connectors/langchain.py index fa0400c9..0a1c7c1c 100644 --- a/packages/openmemory-py/src/openmemory/connectors/langchain.py +++ b/packages/openmemory-py/src/openmemory/connectors/langchain.py @@ -7,8 +7,29 @@ from langchain_core.callbacks import CallbackManagerForRetrieverRun except ImportError: # Optional dependencies - BaseChatMessageHistory = object - BaseRetriever = object + class BaseChatMessageHistory: # type: ignore + pass + + class BaseRetriever: # type: ignore + pass + + class BaseMessage: # type: ignore + def __init__(self, content: str = ""): + self.content = content + + class HumanMessage(BaseMessage): # type: ignore + pass + + class AIMessage(BaseMessage): # type: ignore + pass + + class Document: # type: ignore + def __init__(self, page_content: str, metadata: Any | None = None): + self.page_content = page_content + self.metadata = metadata or {} + + class CallbackManagerForRetrieverRun: # type: ignore + pass from ..main import Memory diff --git a/render.yaml b/render.yaml index d073b721..7e9ef708 100644 --- a/render.yaml +++ b/render.yaml @@ -10,5 +10,3 @@ services: envVars: - key: NODE_ENV value: production - - key: OM_PORT - value: 8080 diff --git a/scripts/run-pr-tests.sh b/scripts/run-pr-tests.sh new file mode 100755 index 00000000..ebaf01ee --- /dev/null +++ b/scripts/run-pr-tests.sh @@ -0,0 +1,895 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" +LOG_DIR="${ROOT_DIR}/test-results" +STAMP="$(date -u +"%Y%m%dT%H%M%SZ")" +RUN_DIR="${LOG_DIR}/pr112-${STAMP}" +LOG_FILE="${RUN_DIR}/run.log" +REPORT_FILE="${LOG_DIR}/PR112-validation-report.md" + +mkdir -p "${RUN_DIR}" + +exec > >(tee -a "${LOG_FILE}") 2>&1 + +echo "[pr-tests] start: $(date -u +"%Y-%m-%dT%H:%M:%SZ")" +echo "[pr-tests] repo: ${ROOT_DIR}" +echo "[pr-tests] run_dir: ${RUN_DIR}" + +require_cmd() { + if ! command -v "$1" >/dev/null 2>&1; then + echo "[pr-tests] missing required command: $1" >&2 + exit 127 + fi +} + +section() { + echo + echo "============================================================" + echo "[pr-tests] $1" + echo "============================================================" +} + +pick_free_port() { + python3 - <<'PY' +import socket +for port in range(18081, 18151): + s = socket.socket() + try: + s.bind(("127.0.0.1", port)) + print(port) + raise SystemExit(0) + except OSError: + continue + finally: + try: + s.close() + except Exception: + pass +raise SystemExit("no free port in range 18081-18150") +PY +} + +expect_http_code() { + local want="$1" + local got="$2" + local label="$3" + if [[ "${got}" != "${want}" ]]; then + echo "[pr-tests] ❌ ${label}: expected HTTP ${want}, got ${got}" >&2 + return 1 + fi + echo "[pr-tests] ✅ ${label}: HTTP ${got}" +} + +extract_mcp_session_id() { + local headers_file="$1" + python3 - <<'PY' "${headers_file}" +import re, sys +headers = open(sys.argv[1], "r", encoding="utf-8", errors="ignore").read().splitlines() +for line in headers: + m = re.match(r"(?i)^mcp-session-id:\\s*(.+?)\\s*$", line.strip()) + if m: + print(m.group(1).strip()) + raise SystemExit(0) +print("") +raise SystemExit(0) +PY +} + +json_extract() { + local file="$1" + local expr="$2" + python3 - <<'PY' "${file}" "${expr}" +import json, sys +data = json.load(open(sys.argv[1], "r", encoding="utf-8")) +expr = sys.argv[2].strip() + +def get_path(obj, path): + cur = obj + for part in path.split("."): + if part.endswith("]"): + name, idx = part[:-1].split("[", 1) + if name: + cur = cur[name] + cur = cur[int(idx)] + else: + cur = cur[part] + return cur + +val = get_path(data, expr) +if isinstance(val, (dict, list)): + print(json.dumps(val)) +else: + print(val) +PY +} + +run_in_docker_node() { + local image="node:22-bullseye" + docker run --rm \ + -v "${ROOT_DIR}:/repo" \ + -w "/repo/packages/openmemory-js" \ + -e OM_DB_URL="sqlite:///:memory:" \ + -e OM_TIER="fast" \ + -e OM_VEC_DIM="1536" \ + "${image}" \ + bash -lc 'set -euo pipefail; apt-get update -y >/dev/null; apt-get install -y python3 make g++ >/dev/null; node -v; npm -v; npm ci; npm run build; npx tsx tests/test_omnibus.ts' +} + +run_in_docker_python() { + local image="python:3.11" + docker run --rm \ + -v "${ROOT_DIR}:/repo" \ + -w "/repo/packages/openmemory-py" \ + "${image}" \ + bash -lc 'set -euo pipefail; python --version; pip --version; pip install -e ".[dev]"; pytest tests/test_omnibus.py -v' +} + +run_mcp_http_smoke() { + local image_tag="$1" + + local api_key="pr112-test-key" + local default_user="pr112-user" + local host_port + host_port="$(pick_free_port)" + + local container_port="18080" + local container_name="openmemory-pr112-${STAMP}" + local volume_name="openmemory-pr112-data-${STAMP}" + + section "Run container for MCP/HTTP smoke (port ${host_port} -> ${container_port})" + echo "[pr-tests] building/running with api_key=${api_key} default_user=${default_user}" + + # Persist identifiers outside of function scope because EXIT traps run + # after locals are unset (macOS bash + set -u). 
+ PRTEST_CONTAINER_NAME="${container_name}" + PRTEST_VOLUME_NAME="${volume_name}" + + docker volume create "${volume_name}" >/dev/null + + cleanup() { + if [[ "${KEEP_DOCKER:-}" == "1" ]]; then + echo "[pr-tests] KEEP_DOCKER=1 set; skipping container/volume cleanup" + return + fi + if [[ -n "${PRTEST_CONTAINER_NAME:-}" ]]; then + docker rm -f "${PRTEST_CONTAINER_NAME}" >/dev/null 2>&1 || true + fi + if [[ -n "${PRTEST_VOLUME_NAME:-}" ]]; then + docker volume rm -f "${PRTEST_VOLUME_NAME}" >/dev/null 2>&1 || true + fi + } + trap cleanup EXIT + + docker run -d --rm \ + --name "${container_name}" \ + -e "OM_PORT=${container_port}" \ + -e "OM_API_KEY=${api_key}" \ + -e "OM_DEFAULT_USER_ID=${default_user}" \ + -e "OM_USE_SUMMARY_ONLY=false" \ + -e "OM_MAX_PAYLOAD_SIZE=1048576" \ + -e "OM_MODE=standard" \ + -e "OM_TIER=hybrid" \ + -e "OM_EMBEDDINGS=synthetic" \ + -e "OM_EMBEDDING_FALLBACK=synthetic" \ + -e "OM_METADATA_BACKEND=sqlite" \ + -e "OM_VECTOR_BACKEND=sqlite" \ + -e "OM_DB_PATH=/data/openmemory.sqlite" \ + -v "${volume_name}:/data" \ + -p "${host_port}:${container_port}" \ + "${image_tag}" >/dev/null + + local base="http://127.0.0.1:${host_port}" + + section "Wait for /health" + local deadline=$((SECONDS + 60)) + until curl -fsS "${base}/health" >/dev/null 2>&1; do + if (( SECONDS > deadline )); then + echo "[pr-tests] ❌ healthcheck timeout; container logs:" >&2 + docker logs "${container_name}" >&2 || true + exit 1 + fi + sleep 1 + done + curl -fsS "${base}/health" | tee "${RUN_DIR}/health.json" >/dev/null + echo "[pr-tests] ✅ healthy: ${base}" + + section "Auth required (HTTP)" + local code + code="$(curl -sS -o /dev/null -w "%{http_code}" "${base}/memory/all")" + expect_http_code "401" "${code}" "GET /memory/all without key" + + code="$(curl -sS -o /dev/null -w "%{http_code}" -H "x-api-key: ${api_key}" "${base}/memory/all")" + expect_http_code "200" "${code}" "GET /memory/all with key" + + section "HTTP CRUD: add/get/patch/delete" + curl -sS -H "Content-Type: application/json" -H "x-api-key: ${api_key}" \ + --data "$(python3 - <<'PY' +import json +print(json.dumps({ + "content": "http-add-content", + "tags": ["pr112", "http"], + "metadata": {"source": "pr112-run-pr-tests"}, + "user_id": "pr112-user", +})) +PY +)" \ + "${base}/memory/add" | tee "${RUN_DIR}/http-memory-add.json" >/dev/null + + local http_id + http_id="$(python3 - <<'PY' "${RUN_DIR}/http-memory-add.json" +import json, sys +data = json.load(open(sys.argv[1])) +print(data["id"]) +PY +)" + echo "[pr-tests] http memory id: ${http_id}" + + curl -sS -H "x-api-key: ${api_key}" \ + "${base}/memory/${http_id}?user_id=${default_user}" | tee "${RUN_DIR}/http-memory-get.json" >/dev/null + + curl -sS -X PATCH -H "Content-Type: application/json" -H "x-api-key: ${api_key}" \ + --data "$(python3 - <<'PY' +import json +print(json.dumps({ + "content": "http-updated-content", + "tags": ["pr112", "http", "updated"], + "metadata": {"source": "pr112-run-pr-tests", "updated": True}, + "user_id": "pr112-user", +})) +PY +)" \ + "${base}/memory/${http_id}" | tee "${RUN_DIR}/http-memory-patch.json" >/dev/null + + curl -sS -H "x-api-key: ${api_key}" \ + "${base}/memory/${http_id}?user_id=${default_user}" | tee "${RUN_DIR}/http-memory-get-after-patch.json" >/dev/null + + local http_content + http_content="$(python3 - <<'PY' "${RUN_DIR}/http-memory-get-after-patch.json" +import json, sys +data = json.load(open(sys.argv[1])) +print(data.get("content","")) +PY +)" + if [[ "${http_content}" != "http-updated-content" ]]; then + echo "[pr-tests] ❌ HTTP 
PATCH did not update content (got '${http_content}')" >&2 + exit 1 + fi + echo "[pr-tests] ✅ HTTP PATCH updated content" + + curl -sS -X DELETE -H "x-api-key: ${api_key}" \ + "${base}/memory/${http_id}?user_id=${default_user}" | tee "${RUN_DIR}/http-memory-delete.json" >/dev/null + + code="$(curl -sS -o /dev/null -w "%{http_code}" -H "x-api-key: ${api_key}" "${base}/memory/${http_id}?user_id=${default_user}")" + expect_http_code "404" "${code}" "GET /memory/:id after delete" + + section "MCP transport: GET /mcp (SSE headers)" + set +e + curl -sS -D "${RUN_DIR}/mcp-sse.headers" -o /dev/null \ + --max-time 2 \ + -H "Accept: text/event-stream" \ + -H "x-api-key: ${api_key}" \ + "${base}/mcp" + local curl_rc=$? + set -e + if [[ "${curl_rc}" -ne 0 && "${curl_rc}" -ne 18 && "${curl_rc}" -ne 28 ]]; then + echo "[pr-tests] ❌ SSE probe curl failed with rc=${curl_rc}" >&2 + exit 1 + fi + + python3 - <<'PY' "${RUN_DIR}/mcp-sse.headers" +import sys +hdr = open(sys.argv[1], "r", encoding="utf-8", errors="ignore").read().lower() +assert "200" in hdr.splitlines()[0] +assert "content-type:" in hdr +assert "text/event-stream" in hdr +print("ok") +PY + echo "[pr-tests] ✅ SSE headers OK" + + section "MCP initialize + tools/list" + curl -sS -D "${RUN_DIR}/mcp-init.headers" -o "${RUN_DIR}/mcp-init.json" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -H "x-api-key: ${api_key}" \ + --data "$(python3 - <<'PY' +import json +print(json.dumps({ + "jsonrpc":"2.0", + "id":1, + "method":"initialize", + "params":{ + "protocolVersion":"2024-11-05", + "capabilities":{}, + "clientInfo":{"name":"pr112-run-pr-tests","version":"0.0.0"} + } +})) +PY +)" \ + "${base}/mcp" + + local mcp_sid + mcp_sid="$(extract_mcp_session_id "${RUN_DIR}/mcp-init.headers")" + if [[ -n "${mcp_sid}" ]]; then + echo "[pr-tests] mcp-session-id: ${mcp_sid}" + else + echo "[pr-tests] mcp-session-id: (none; stateless transport)" + fi + + mcp_session_header() { + if [[ -n "${mcp_sid}" ]]; then + printf '%s\n' "-H" "mcp-session-id: ${mcp_sid}" + fi + } + + curl -sS -D "${RUN_DIR}/mcp-tools.headers" -o "${RUN_DIR}/mcp-tools.json" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -H "x-api-key: ${api_key}" \ + $(mcp_session_header) \ + --data '{"jsonrpc":"2.0","id":2,"method":"tools/list","params":{}}' \ + "${base}/mcp" + + python3 - <<'PY' "${RUN_DIR}/mcp-tools.json" +import json, sys +data = json.load(open(sys.argv[1])) +names = [t["name"] for t in data["result"]["tools"]] +need = ["openmemory_store","openmemory_query","openmemory_list","openmemory_get","openmemory_reinforce","openmemory_update","openmemory_delete"] +missing = [n for n in need if n not in names] +if missing: + raise SystemExit("missing tools: " + ", ".join(missing)) +print("ok") +PY + echo "[pr-tests] ✅ tools/list includes update/delete" + + section "MCP: store (default user_id from env) -> get -> update -> get -> reinforce -> delete -> get" + curl -sS -o "${RUN_DIR}/mcp-store.json" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -H "x-api-key: ${api_key}" \ + $(mcp_session_header) \ + --data "$(python3 - <<'PY' +import json +print(json.dumps({ + "jsonrpc":"2.0", + "id":3, + "method":"tools/call", + "params":{ + "name":"openmemory_store", + "arguments":{ + "content":"mcp-store-content", + "tags":["pr112","mcp"], + "metadata":{"source":"pr112-run-pr-tests"} + } + } +})) +PY +)" \ + "${base}/mcp" + + local mcp_store_json + 
mcp_store_json="$(json_extract "${RUN_DIR}/mcp-store.json" "result.content[1].text")" + echo "${mcp_store_json}" > "${RUN_DIR}/mcp-store.payload.json" + local mcp_id + mcp_id="$(python3 - <<'PY' "${RUN_DIR}/mcp-store.payload.json" +import json, sys +payload = json.loads(open(sys.argv[1]).read()) +print(payload["hsg"]["id"]) +PY +)" + local mcp_user + mcp_user="$(python3 - <<'PY' "${RUN_DIR}/mcp-store.payload.json" +import json, sys +payload = json.loads(open(sys.argv[1]).read()) +print(payload.get("user_id") or "") +PY +)" + echo "[pr-tests] stored mcp id: ${mcp_id} (user_id='${mcp_user}')" + if [[ "${mcp_user}" != "${default_user}" ]]; then + echo "[pr-tests] ❌ MCP default user_id mismatch (expected '${default_user}', got '${mcp_user}')" >&2 + exit 1 + fi + echo "[pr-tests] ✅ MCP store used default user_id" + + curl -sS -o "${RUN_DIR}/mcp-get.json" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -H "x-api-key: ${api_key}" \ + $(mcp_session_header) \ + --data "$(python3 - < "${RUN_DIR}/mcp-get.payload.json" + python3 - <<'PY' "${RUN_DIR}/mcp-get.payload.json" +import json, sys +payload = json.loads(open(sys.argv[1]).read()) +assert payload["content"] == "mcp-store-content" +print("ok") +PY + echo "[pr-tests] ✅ MCP get returned full content" + + section "MCP: large content is not truncated (OM_USE_SUMMARY_ONLY=false)" + python3 - <<'PY' > "${RUN_DIR}/mcp-big-content.txt" +tail = "<<>>" +print(("x" * 20000) + tail) +PY + + curl -sS -o "${RUN_DIR}/mcp-store-big.json" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -H "x-api-key: ${api_key}" \ + $(mcp_session_header) \ + --data "$(python3 - < "${RUN_DIR}/mcp-store-big.payload.json" + local mcp_big_id + mcp_big_id="$(python3 - <<'PY' "${RUN_DIR}/mcp-store-big.payload.json" +import json, sys +payload = json.loads(open(sys.argv[1]).read()) +print(payload["hsg"]["id"]) +PY +)" + + curl -sS -o "${RUN_DIR}/mcp-get-big.json" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -H "x-api-key: ${api_key}" \ + $(mcp_session_header) \ + --data "$(python3 - < "${RUN_DIR}/mcp-get-big.payload.json" + python3 - <<'PY' "${RUN_DIR}/mcp-get-big.payload.json" +import json, sys +payload = json.loads(open(sys.argv[1]).read()) +content = payload["content"] +assert content.rstrip("\n").endswith("<<>>") +assert len(content) > 20000 +print("ok") +PY + echo "[pr-tests] ✅ MCP get preserved big content tail" + + curl -sS -o "${RUN_DIR}/mcp-delete-big.json" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -H "x-api-key: ${api_key}" \ + $(mcp_session_header) \ + --data "$(python3 - < "${RUN_DIR}/mcp-get-after-update.payload.json" + python3 - <<'PY' "${RUN_DIR}/mcp-get-after-update.payload.json" +import json, sys +payload = json.loads(open(sys.argv[1]).read()) +assert payload["content"] == "mcp-updated-content" +assert payload["metadata"]["updated"] is True +assert "updated" in payload["tags"] +print("ok") +PY + echo "[pr-tests] ✅ MCP update applied (content/tags/metadata)" + + curl -sS -o "${RUN_DIR}/mcp-reinforce.json" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json, text/event-stream" \ + -H "x-api-key: ${api_key}" \ + $(mcp_session_header) \ + --data "$(python3 - <&2 + echo "[pr-tests] payload: ${after_delete_text}" >&2 + exit 1 + fi + echo "[pr-tests] ✅ MCP delete removed memory" + + section "MCP transport: DELETE /mcp closes session" + code="$(curl -sS -o 
/dev/null -w "%{http_code}" -X DELETE \ + -H "Accept: application/json, text/event-stream" \ + -H "x-api-key: ${api_key}" \ + $(mcp_session_header) \ + "${base}/mcp")" + if [[ "${code}" != "200" && "${code}" != "204" ]]; then + echo "[pr-tests] ❌ DELETE /mcp unexpected HTTP ${code}" >&2 + exit 1 + fi + echo "[pr-tests] ✅ DELETE /mcp returned HTTP ${code}" + + section "Persistence: restart container keeps stored memories" + curl -sS -H "Content-Type: application/json" -H "x-api-key: ${api_key}" \ + --data "$(python3 - <<'PY' +import json +print(json.dumps({ + "content": "persistence-check-content", + "tags": ["pr112", "persist"], + "metadata": {"source": "pr112-run-pr-tests"}, + "user_id": "pr112-user", +})) +PY +)" \ + "${base}/memory/add" | tee "${RUN_DIR}/persist-add.json" >/dev/null + local persist_id + persist_id="$(python3 - <<'PY' "${RUN_DIR}/persist-add.json" +import json, sys +data = json.load(open(sys.argv[1])) +print(data["id"]) +PY +)" + + docker rm -f "${container_name}" >/dev/null + docker run -d --rm \ + --name "${container_name}" \ + -e "OM_PORT=${container_port}" \ + -e "OM_API_KEY=${api_key}" \ + -e "OM_DEFAULT_USER_ID=${default_user}" \ + -e "OM_USE_SUMMARY_ONLY=false" \ + -e "OM_MAX_PAYLOAD_SIZE=1048576" \ + -e "OM_MODE=standard" \ + -e "OM_TIER=hybrid" \ + -e "OM_EMBEDDINGS=synthetic" \ + -e "OM_EMBEDDING_FALLBACK=synthetic" \ + -e "OM_METADATA_BACKEND=sqlite" \ + -e "OM_VECTOR_BACKEND=sqlite" \ + -e "OM_DB_PATH=/data/openmemory.sqlite" \ + -v "${volume_name}:/data" \ + -p "${host_port}:${container_port}" \ + "${image_tag}" >/dev/null + + deadline=$((SECONDS + 60)) + until curl -fsS "${base}/health" >/dev/null 2>&1; do + if (( SECONDS > deadline )); then + echo "[pr-tests] ❌ healthcheck timeout after restart; container logs:" >&2 + docker logs "${container_name}" >&2 || true + exit 1 + fi + sleep 1 + done + + code="$(curl -sS -o /dev/null -w "%{http_code}" -H "x-api-key: ${api_key}" "${base}/memory/${persist_id}?user_id=${default_user}")" + expect_http_code "200" "${code}" "GET /memory/:id after restart (persistence)" + + curl -sS -H "x-api-key: ${api_key}" \ + "${base}/memory/${persist_id}?user_id=${default_user}" | tee "${RUN_DIR}/persist-get-after-restart.json" >/dev/null + + python3 - <<'PY' "${RUN_DIR}/persist-get-after-restart.json" +import json, sys +data = json.load(open(sys.argv[1])) +assert data.get("content") == "persistence-check-content" +print("ok") +PY + echo "[pr-tests] ✅ persistence content verified" +} + +write_report() { + section "Write Markdown report" + local big_len="unknown" + if [[ -f "${RUN_DIR}/mcp-big-content.txt" ]]; then + big_len="$(python3 -c "print(len(open('${RUN_DIR}/mcp-big-content.txt','r').read()))")" + fi + { + echo "# PR112 Validation Report" + echo + echo "- Generated (UTC): $(date -u +"%Y-%m-%dT%H:%M:%SZ")" + echo "- Repo: ${ROOT_DIR}" + echo "- Commit: $(git -C "${ROOT_DIR}" rev-parse HEAD)" + echo "- Script: scripts/run-pr-tests.sh" + echo "- Run dir: ${RUN_DIR}" + echo + echo "## Test Script (full)" + echo + echo "\`\`\`bash" + cat "${ROOT_DIR}/scripts/run-pr-tests.sh" + echo "\`\`\`" + echo + echo "## What Was Tested" + echo + echo "### SDK (containerized)" + echo "- Node SDK omnibus: build + tests/test_omnibus.ts" + echo "- Python SDK omnibus: pytest tests/test_omnibus.py" + echo + echo "### Service (Docker image)" + echo "- Build image from packages/openmemory-js/Dockerfile" + echo "- Start container + /health" + echo "- Auth required for HTTP API" + echo "- HTTP CRUD: /memory/add, /memory/:id (GET/PATCH/DELETE)" + echo "- 
MCP transport: GET SSE headers, POST JSON-RPC, DELETE /mcp" + echo "- MCP tools: tools/list includes update/delete; store/get/update/reinforce/delete flow" + echo "- Default user_id from OM_DEFAULT_USER_ID for MCP calls without user_id" + echo "- Persistence: restart container with same volume keeps data" + echo + echo "## Results (artifacts)" + echo + echo "- Log: ${LOG_FILE}" + echo "- Directory with captured responses: ${RUN_DIR}/" + echo + echo "### Health" + echo "\`\`\`json" + cat "${RUN_DIR}/health.json" + echo "\`\`\`" + echo + echo "### HTTP CRUD" + echo "**Add** \`${RUN_DIR}/http-memory-add.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/http-memory-add.json" + echo "\`\`\`" + echo + echo "**Get (before patch)** \`${RUN_DIR}/http-memory-get.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/http-memory-get.json" + echo "\`\`\`" + echo + echo "**Patch** \`${RUN_DIR}/http-memory-patch.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/http-memory-patch.json" + echo "\`\`\`" + echo + echo "**Get (after patch)** \`${RUN_DIR}/http-memory-get-after-patch.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/http-memory-get-after-patch.json" + echo "\`\`\`" + echo + echo "**Delete** \`${RUN_DIR}/http-memory-delete.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/http-memory-delete.json" + echo "\`\`\`" + echo + echo "### MCP (SSE headers probe)" + echo "\`\`\`text" + sed -n '1,30p' "${RUN_DIR}/mcp-sse.headers" + echo "\`\`\`" + echo + echo "### MCP initialize" + echo "**Headers** \`${RUN_DIR}/mcp-init.headers\`" + echo "\`\`\`text" + sed -n '1,50p' "${RUN_DIR}/mcp-init.headers" + echo "\`\`\`" + echo + echo "**Body** \`${RUN_DIR}/mcp-init.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/mcp-init.json" + echo "\`\`\`" + echo + echo "### MCP tools/list" + echo "\`\`\`json" + cat "${RUN_DIR}/mcp-tools.json" + echo "\`\`\`" + echo + echo "### MCP store/get/update/delete (small content)" + echo "**Store response** \`${RUN_DIR}/mcp-store.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/mcp-store.json" + echo "\`\`\`" + echo + echo "**Store payload (parsed)** \`${RUN_DIR}/mcp-store.payload.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/mcp-store.payload.json" + echo "\`\`\`" + echo + echo "**Get payload (parsed)** \`${RUN_DIR}/mcp-get.payload.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/mcp-get.payload.json" + echo "\`\`\`" + echo + echo "**Update response** \`${RUN_DIR}/mcp-update.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/mcp-update.json" + echo "\`\`\`" + echo + echo "**Get-after-update payload (parsed)** \`${RUN_DIR}/mcp-get-after-update.payload.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/mcp-get-after-update.payload.json" + echo "\`\`\`" + echo + echo "**Reinforce response** \`${RUN_DIR}/mcp-reinforce.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/mcp-reinforce.json" + echo "\`\`\`" + echo + echo "**Delete response** \`${RUN_DIR}/mcp-delete.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/mcp-delete.json" + echo "\`\`\`" + echo + echo "**Get-after-delete response** \`${RUN_DIR}/mcp-get-after-delete.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/mcp-get-after-delete.json" + echo "\`\`\`" + echo + echo "### MCP large content (truncation regression check)" + echo "- Stored content length: ${big_len}" + echo "- Retrieved payload file (contains full content): ${RUN_DIR}/mcp-get-big.payload.json" + echo "- Store response: ${RUN_DIR}/mcp-store-big.json" + echo "- Get response: ${RUN_DIR}/mcp-get-big.json" + echo + echo "### Persistence (volume survives restart)" + echo "**Add** \`${RUN_DIR}/persist-add.json\`" + echo 
"\`\`\`json" + cat "${RUN_DIR}/persist-add.json" + echo "\`\`\`" + echo + echo "**Get after restart** \`${RUN_DIR}/persist-get-after-restart.json\`" + echo "\`\`\`json" + cat "${RUN_DIR}/persist-get-after-restart.json" + echo "\`\`\`" + echo + echo "## How To Reproduce" + echo + echo "\`\`\`bash" + echo "scripts/run-pr-tests.sh" + echo "\`\`\`" + } > "${REPORT_FILE}" + echo "[pr-tests] ✅ report: ${REPORT_FILE}" +} + +section "Versions" +require_cmd git +require_cmd python3 +git -C "${ROOT_DIR}" rev-parse HEAD +git -C "${ROOT_DIR}" status -sb + +if command -v docker >/dev/null 2>&1; then + docker version +else + echo "[pr-tests] docker not found; cannot run containerized tests." >&2 + exit 127 +fi + +section "Node SDK (packages/openmemory-js)" +run_in_docker_node + +section "Python SDK (packages/openmemory-py)" +run_in_docker_python + +section "Docker Build (packages/openmemory-js/Dockerfile)" +docker build -t openmemory-prtest:local "${ROOT_DIR}/packages/openmemory-js" + +section "MCP/HTTP Smoke (Docker image)" +require_cmd curl +run_mcp_http_smoke "openmemory-prtest:local" + +write_report + +section "Done" +echo "[pr-tests] ✅ all checks passed" +echo "[pr-tests] log: ${LOG_FILE}" +echo "[pr-tests] report: ${REPORT_FILE}" diff --git a/tools/migrate/__main__.py b/tools/migrate/__main__.py index 258c30b5..b628ac6e 100644 --- a/tools/migrate/__main__.py +++ b/tools/migrate/__main__.py @@ -22,7 +22,7 @@ async def main(): parser.add_argument("--api-key", required=True, help="Source API Key") parser.add_argument("--url", help="Source API URL (optional)") parser.add_argument("--output", default="./exports", help="Export directory") - parser.add_argument("--openmemory-url", default=os.getenv("OPENMEMORY_URL", "http://localhost:8080"), help="Target OpenMemory URL") + parser.add_argument("--openmemory-url", default=os.getenv("OPENMEMORY_URL", "http://localhost:18080"), help="Target OpenMemory URL") parser.add_argument("--openmemory-key", default=os.getenv("OPENMEMORY_API_KEY", ""), help="Target OpenMemory Key") parser.add_argument("--verbose", action="store_true", help="Debug logging") parser.add_argument("--verify", action="store_true", help="Run verification") diff --git a/tools/migrate/schemas.py b/tools/migrate/schemas.py index 8b4e5686..b09e613f 100644 --- a/tools/migrate/schemas.py +++ b/tools/migrate/schemas.py @@ -16,7 +16,7 @@ class MigrationConfig: output_dir: str = "./exports" batch_size: int = 1000 rate_limit: float = 1.0 - openmemory_url: str = "http://localhost:8080" + openmemory_url: str = "http://localhost:18080" openmemory_key: str = "" verify: bool = False resume: bool = False