diff --git a/.github/workflows/tests-evault-core.yml b/.github/workflows/tests-evault-core.yml index 0b600c9d..473eadd5 100644 --- a/.github/workflows/tests-evault-core.yml +++ b/.github/workflows/tests-evault-core.yml @@ -23,12 +23,34 @@ jobs: with: node-version: 22 + - name: Install build dependencies + run: | + sudo apt-get update + sudo apt-get install -y build-essential python3 + - name: Install pnpm run: npm install -g pnpm - name: Install dependencies run: pnpm install + - name: Clean and rebuild ssh2 native module + run: | + # Remove any pre-built binaries that might be incompatible + find node_modules -name "sshcrypto.node" -delete 2>/dev/null || true + find node_modules -path "*/ssh2/lib/protocol/crypto/build/Release/sshcrypto.node" -delete 2>/dev/null || true + # Rebuild ssh2 specifically for this platform + pnpm rebuild ssh2 + # Rebuild all other native modules + pnpm rebuild + - name: Run tests + env: + CI: true + GITHUB_ACTIONS: true + DOCKER_HOST: unix:///var/run/docker.sock + TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE: /var/run/docker.sock + TESTCONTAINERS_RYUK_DISABLED: false + TESTCONTAINERS_HOST_OVERRIDE: localhost run: pnpm -F=evault-core test diff --git a/.github/workflows/tests-registry.yml b/.github/workflows/tests-registry.yml new file mode 100644 index 00000000..8a5d717a --- /dev/null +++ b/.github/workflows/tests-registry.yml @@ -0,0 +1,56 @@ +name: Tests [registry] + +on: + push: + branches: [main] + paths: + - 'platforms/registry/**' + pull_request: + branches: [main] + paths: + - 'platforms/registry/**' + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Node.js 22 + uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Install build dependencies + run: | + sudo apt-get update + sudo apt-get install -y build-essential python3 + + - name: Install pnpm + run: npm install -g pnpm + + - name: Install dependencies + run: pnpm install + + - name: Clean and rebuild ssh2 native module + run: | + # Remove any pre-built binaries that might be incompatible + find node_modules -name "sshcrypto.node" -delete 2>/dev/null || true + find node_modules -path "*/ssh2/lib/protocol/crypto/build/Release/sshcrypto.node" -delete 2>/dev/null || true + # Rebuild ssh2 specifically for this platform + pnpm rebuild ssh2 + # Rebuild all other native modules + pnpm rebuild + + - name: Run tests + env: + CI: true + GITHUB_ACTIONS: true + DOCKER_HOST: unix:///var/run/docker.sock + TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE: /var/run/docker.sock + TESTCONTAINERS_RYUK_DISABLED: false + TESTCONTAINERS_HOST_OVERRIDE: localhost + run: pnpm -F=registry test + diff --git a/.gitignore b/.gitignore index c914036d..01b0a070 100644 --- a/.gitignore +++ b/.gitignore @@ -40,6 +40,9 @@ yarn-error.log* # Misc .DS_Store *.pem - -evault-cache.json \ No newline at end of file + +evault-cache.json + +.pnpm-store +.pnpm-store/ diff --git a/Dockerfile.dev b/Dockerfile.dev new file mode 100644 index 00000000..7781c3ec --- /dev/null +++ b/Dockerfile.dev @@ -0,0 +1,21 @@ +FROM node:20-alpine + +WORKDIR /app + +# Install pnpm +RUN npm install -g pnpm@10.13.1 + +# Copy entrypoint script +COPY docker-entrypoint.sh /usr/local/bin/ +RUN chmod +x /usr/local/bin/docker-entrypoint.sh + +ENTRYPOINT ["docker-entrypoint.sh"] + +# Copy only root config files - source code comes via volumes +# This keeps the image small and export fast +COPY package.json pnpm-lock.yaml pnpm-workspace.yaml turbo.json ./ + +# Note: Dependencies are installed on first 
container run via entrypoint script +# This avoids huge layer exports during build. Volumes mount source code, +# and entrypoint ensures deps are installed before running commands. + diff --git a/dev-docker-compose.README.md b/dev-docker-compose.README.md new file mode 100644 index 00000000..4547e353 --- /dev/null +++ b/dev-docker-compose.README.md @@ -0,0 +1,89 @@ +# Dev Docker Compose + +This docker-compose file sets up the development environment for the Metastate project. + +## Core Services (Always Running) + +- **registry** - Runs on port 4321 +- **evault-core** - Runs on ports 3001 (Express/Provisioning) and 4000 (Fastify/GraphQL) +- **neo4j** - Runs on ports 7474 (HTTP) and 7687 (Bolt) for graph data storage +- **postgres** - Runs on port 5432 with multiple databases pre-created + +## Optional Platform Services + +Use Docker Compose profiles to enable optional platforms: + +### Available Profiles + +- `pictique` - Pictique API (port 1111) +- `evoting` - eVoting API (port 4000) +- `dreamsync` - DreamSync API (port 4001) +- `cerberus` - Cerberus (port 3002) +- `group-charter` - Group Charter Manager API (port 3003) +- `blabsy` - Blabsy W3DS Auth API (port 3000) +- `ereputation` - eReputation (port 5000) +- `marketplace` - Marketplace (port 5001) +- `all` - Enable all optional platforms at once + +## Usage + +### Start core services only: +```bash +docker compose -f dev-docker-compose.yaml up +``` + +### Start with specific platforms: +```bash +# Single platform +docker compose -f dev-docker-compose.yaml --profile pictique up + +# Multiple platforms +docker compose -f dev-docker-compose.yaml --profile pictique --profile evoting up + +# All platforms +docker compose -f dev-docker-compose.yaml --profile all up +``` + +### Background mode: +```bash +docker compose -f dev-docker-compose.yaml --profile pictique up -d +``` + +### Stop services: +```bash +docker compose -f dev-docker-compose.yaml down +``` + +### View logs: +```bash +# All services +docker compose -f dev-docker-compose.yaml logs -f + +# Specific service +docker compose -f dev-docker-compose.yaml logs -f registry +``` + +## Environment Variables + +Create a `.env` file in the project root with your configuration: + +```env +# Registry +REGISTRY_SHARED_SECRET=your-secret-here +PUBLIC_REGISTRY_URL=http://localhost:4321 + +# Database URLs (optional - defaults are provided) +REGISTRY_DATABASE_URL=postgresql://postgres:postgres@postgres:5432/registry +NEO4J_URI=bolt://neo4j:7687 +NEO4J_USER=neo4j +NEO4J_PASSWORD=neo4j +``` + +## Notes + +- All services mount the source code for hot-reload development +- Node modules are stored in Docker volumes to avoid host conflicts +- PostgreSQL automatically creates all required databases on first startup +- Services wait for database health checks before starting + + diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml new file mode 100644 index 00000000..737f4192 --- /dev/null +++ b/dev-docker-compose.yaml @@ -0,0 +1,365 @@ +version: '3.8' + +services: + # Core Services - Always Running + registry: + build: + context: . 
+ dockerfile: ./Dockerfile.dev + ports: + - "4321:4321" + environment: + - NODE_ENV=development + - DATABASE_URL=${REGISTRY_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/registry} + - REGISTRY_SHARED_SECRET=${REGISTRY_SHARED_SECRET:-dev-secret-change-me} + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://localhost:4321} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/registry + command: sh -c "cd /app/platforms/registry && pnpm run dev" + depends_on: + postgres: + condition: service_healthy + networks: + - metastate-network + develop: + watch: + - action: restart + path: ./platforms/registry/src + ignore: + - node_modules + - action: restart + path: ./infrastructure/w3id/src + ignore: + - node_modules + - action: rebuild + path: ./platforms/registry/package.json + - action: rebuild + path: ./infrastructure/w3id/package.json + - action: rebuild + path: ./.env + + evault-core: + build: + context: . + dockerfile: ./Dockerfile.dev + ports: + - "3001:3001" # Express (provisioning API) + - "4000:4000" # Fastify (GraphQL/HTTP) + environment: + - NODE_ENV=development + - EXPRESS_PORT=3001 + - FASTIFY_PORT=4000 + - PORT=4000 + - REGISTRY_DATABASE_URL=${REGISTRY_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/registry} + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} + - REGISTRY_SHARED_SECRET=${REGISTRY_SHARED_SECRET:-dev-secret-change-me} + - NEO4J_URI=${NEO4J_URI:-bolt://neo4j:7687} + - NEO4J_USER=${NEO4J_USER:-neo4j} + - NEO4J_PASSWORD=${NEO4J_PASSWORD:-neo4j} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/infrastructure/evault-core + command: sh -c "cd /app/infrastructure/evault-core && pnpm run dev" + depends_on: + postgres: + condition: service_healthy + registry: + condition: service_started + neo4j: + condition: service_started + networks: + - metastate-network + develop: + watch: + - action: restart + path: ./infrastructure/evault-core/src + ignore: + - node_modules + - action: restart + path: ./infrastructure/w3id/src + ignore: + - node_modules + - action: rebuild + path: ./infrastructure/evault-core/package.json + - action: rebuild + path: ./infrastructure/w3id/package.json + - action: rebuild + path: ./.env + + # Neo4j for evault-core graph data + neo4j: + image: neo4j:5.15 + ports: + - "7474:7474" # HTTP + - "7687:7687" # Bolt + environment: + - NEO4J_AUTH=${NEO4J_USER:-neo4j}/${NEO4J_PASSWORD:-neo4j} + volumes: + - neo4j_data:/var/lib/neo4j/data + networks: + - metastate-network + healthcheck: + test: ["CMD-SHELL", "cypher-shell -u neo4j -p ${NEO4J_PASSWORD:-neo4j} 'RETURN 1' || exit 1"] + interval: 10s + timeout: 5s + retries: 5 + + # Database for services + postgres: + image: postgres:15-alpine + ports: + - "5432:5432" + environment: + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=postgres + - POSTGRES_MULTIPLE_DATABASES=registry,pictique,evoting,dreamsync,cerberus,group_charter_manager,blabsy_auth,ereputation,marketplace + volumes: + - postgres_data:/var/lib/postgresql/data + - ./db/init-multiple-databases.sh:/docker-entrypoint-initdb.d/init-multiple-databases.sh + networks: + - metastate-network + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + + # Optional Platform Services - Use profiles to enable + + # Pictique API + pictique-api: + profiles: + - pictique + - all + build: + context: . 
+ dockerfile: ./Dockerfile.dev + ports: + - "1111:1111" + environment: + - NODE_ENV=development + - PORT=1111 + - DATABASE_URL=${PICTIQUE_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/pictique} + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} + - PUBLIC_PICTIQUE_BASE_URL=${PUBLIC_PICTIQUE_BASE_URL:-http://localhost:1111} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/pictique-api + command: pnpm run dev + depends_on: + postgres: + condition: service_healthy + registry: + condition: service_started + networks: + - metastate-network + + # eVoting API + evoting-api: + profiles: + - evoting + - all + build: + context: . + dockerfile: ./Dockerfile.dev + ports: + - "4000:4000" + environment: + - NODE_ENV=development + - PORT=4000 + - DATABASE_URL=${EVOTING_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/evoting} + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} + - PUBLIC_EVOTING_BASE_URL=${PUBLIC_EVOTING_BASE_URL:-http://localhost:4000} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/evoting-api + command: pnpm run dev + depends_on: + postgres: + condition: service_healthy + registry: + condition: service_started + networks: + - metastate-network + + # DreamSync API + dreamsync-api: + profiles: + - dreamsync + - all + build: + context: . + dockerfile: ./Dockerfile.dev + ports: + - "4001:4001" + environment: + - NODE_ENV=development + - PORT=4001 + - DATABASE_URL=${DREAMSYNC_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/dreamsync} + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} + - DREAMSYNC_CLIENT_URL=${DREAMSYNC_CLIENT_URL:-http://localhost:4001} + - JWT_SECRET=${JWT_SECRET:-dev-jwt-secret} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/dreamsync-api + command: pnpm run dev + depends_on: + postgres: + condition: service_healthy + registry: + condition: service_started + networks: + - metastate-network + + # Cerberus + cerberus: + profiles: + - cerberus + - all + build: + context: . + dockerfile: ./Dockerfile.dev + ports: + - "3002:3002" + environment: + - NODE_ENV=development + - PORT=3002 + - DATABASE_URL=${CERBERUS_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/cerberus} + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} + - PUBLIC_CERBERUS_BASE_URL=${PUBLIC_CERBERUS_BASE_URL:-http://localhost:3002} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/cerberus + command: pnpm run dev + depends_on: + postgres: + condition: service_healthy + registry: + condition: service_started + networks: + - metastate-network + + # Group Charter Manager API + group-charter-manager-api: + profiles: + - group-charter + - all + build: + context: . 
+ dockerfile: ./Dockerfile.dev + ports: + - "3003:3003" + environment: + - NODE_ENV=development + - PORT=3003 + - DATABASE_URL=${GROUP_CHARTER_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/group_charter_manager} + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} + - PUBLIC_GROUP_CHARTER_BASE_URL=${PUBLIC_GROUP_CHARTER_BASE_URL:-http://localhost:3003} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/group-charter-manager-api + command: pnpm run dev + depends_on: + postgres: + condition: service_healthy + registry: + condition: service_started + networks: + - metastate-network + + # Blabsy W3DS Auth API + blabsy-w3ds-auth-api: + profiles: + - blabsy + - all + build: + context: . + dockerfile: ./Dockerfile.dev + ports: + - "3000:3000" + environment: + - NODE_ENV=development + - PORT=3000 + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} + - PUBLIC_BLABSY_BASE_URL=${PUBLIC_BLABSY_BASE_URL:-http://localhost:3000} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/blabsy-w3ds-auth-api + command: pnpm run dev + depends_on: + registry: + condition: service_started + networks: + - metastate-network + + # eReputation + ereputation: + profiles: + - ereputation + - all + build: + context: . + dockerfile: ./Dockerfile.dev + ports: + - "5000:5000" + environment: + - NODE_ENV=development + - PORT=5000 + - DATABASE_URL=${EREPUTATION_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/ereputation} + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/eReputation + command: pnpm run dev + depends_on: + postgres: + condition: service_healthy + registry: + condition: service_started + networks: + - metastate-network + + # Marketplace + marketplace: + profiles: + - marketplace + - all + build: + context: . + dockerfile: ./Dockerfile.dev + ports: + - "5001:5001" + environment: + - NODE_ENV=development + - PORT=5001 + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/marketplace + command: pnpm run dev + depends_on: + registry: + condition: service_started + networks: + - metastate-network + +volumes: + postgres_data: + neo4j_data: + node_modules_cache: + +networks: + metastate-network: + driver: bridge + diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh new file mode 100755 index 00000000..2e4271ad --- /dev/null +++ b/docker-entrypoint.sh @@ -0,0 +1,52 @@ +#!/bin/sh +set -e + +# For pnpm workspaces, dependencies need to be installed at the root +# Check if dependencies are actually installed by looking for actual binaries +# Check for ts-node which is needed by most services +LOCK_FILE="/app/node_modules/.install-lock" +MAX_WAIT=300 # 5 minutes max wait + +if [ ! -f "/app/node_modules/.bin/ts-node" ] && [ ! -f "/app/platforms/registry/node_modules/.bin/ts-node" ]; then + # Try to acquire lock - wait if another container is installing + WAIT_TIME=0 + while [ -f "$LOCK_FILE" ] && [ $WAIT_TIME -lt $MAX_WAIT ]; do + echo "⏳ Waiting for another container to finish installing dependencies..." 
+ sleep 5 + WAIT_TIME=$((WAIT_TIME + 5)) + # Re-check if install completed while waiting + if [ -f "/app/node_modules/.bin/ts-node" ]; then + echo "✅ Dependencies installed by another container" + exec "$@" + fi + done + + if [ $WAIT_TIME -ge $MAX_WAIT ]; then + echo "❌ Timeout waiting for dependency installation" + exit 1 + fi + + # Create lock file + touch "$LOCK_FILE" + + echo "⚠️ Installing workspace dependencies at root (first run only)..." + cd /app + if pnpm install --frozen-lockfile; then + echo "✅ Dependencies installed" + else + echo "❌ Failed to install dependencies" + rm -f "$LOCK_FILE" + exit 1 + fi + + # Remove lock file + rm -f "$LOCK_FILE" +else + echo "✅ Dependencies already installed" +fi + +# Run the command passed to the container +# The working_dir is set by docker-compose, so $PWD should already be correct +# But we ensure we're in the right place by letting docker-compose handle it +exec "$@" + diff --git a/infrastructure/eid-wallet/src/routes/(app)/scan-qr/+page.svelte b/infrastructure/eid-wallet/src/routes/(app)/scan-qr/+page.svelte index 547233db..f3f083a4 100644 --- a/infrastructure/eid-wallet/src/routes/(app)/scan-qr/+page.svelte +++ b/infrastructure/eid-wallet/src/routes/(app)/scan-qr/+page.svelte @@ -1,1406 +1,1331 @@ diff --git a/infrastructure/evault-core/README.md b/infrastructure/evault-core/README.md index 64da9ead..6546befb 100644 --- a/infrastructure/evault-core/README.md +++ b/infrastructure/evault-core/README.md @@ -1,182 +1,188 @@ -# eVault Core +# Evault Provisioner -eVault is a secure, distributed data storage and access system designed for the MetaState ecosystem. It provides a robust framework for storing, managing, and accessing structured data with fine-grained access control and GraphQL-based querying capabilities. +A TypeScript API for provisioning evault instances on Nomad. This service allows you to spin up evault instances with Neo4j backends for different tenants. -## Overview +## Prerequisites -eVault is a core component of the MetaState infrastructure that enables: +- Node.js 18+ +- Docker +- Nomad (see setup instructions below) +- OrbStack (for macOS users) -- Secure storage of structured data -- Fine-grained access control using W3ID -- GraphQL-based data querying and manipulation -- Distributed data management -- Integration with the MetaState ecosystem +## Nomad Setup -## Architecture - -### Core Components +### macOS Setup (using OrbStack) -1. **GraphQL Server** +Due to CNI bridge plugin requirements, running Nomad on macOS is best done through OrbStack: - - Provides a flexible API for data operations - - Supports complex queries and mutations - - Includes built-in documentation and visualization tools +1. Install OrbStack: https://orbstack.dev/ +2. Create a new VM in OrbStack +3. SSH into the VM and install Nomad: -2. **Access Control System** +```bash +# Install Nomad +curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add - +sudo apt-add-repository "deb [arch=amd64] https://apt.releases.hashicorp.com $(lsb_release -cs) main" +sudo apt-get update && sudo apt-get install nomad + +# Install CNI plugins +sudo mkdir -p /opt/cni/bin +curl -L https://github.com/containernetworking/plugins/releases/download/v1.3.0/cni-plugins-linux-amd64-v1.3.0.tgz | sudo tar -C /opt/cni/bin -xz +``` - - W3ID-based authentication - - Fine-grained access control lists (ACL) - - Secure token-based authentication +4. Start Nomad in dev mode: -3. 
**Data Storage** +```bash +sudo nomad agent -dev -network-interface=eth0 -log-level=DEBUG -bind=0.0.0.0 +``` - - Neo4j-based storage backend - - Structured data model with envelopes - - Support for multiple data types and ontologies +### Linux Setup -4. **HTTP Server** - - Fastify-based web server - - RESTful endpoints for basic operations - - GraphQL endpoint for advanced operations +1. Install Nomad: -### Data Model +```bash +# Install Nomad +curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add - +sudo apt-add-repository "deb [arch=amd64] https://apt.releases.hashicorp.com $(lsb_release -cs) main" +sudo apt-get update && sudo apt-get install nomad + +# Install CNI plugins +sudo mkdir -p /opt/cni/bin +curl -L https://github.com/containernetworking/plugins/releases/download/v1.3.0/cni-plugins-linux-amd64-v1.3.0.tgz | sudo tar -C /opt/cni/bin -xz +``` -The eVault system uses a hierarchical data model: +2. Start Nomad in dev mode: -- **MetaEnvelope**: Top-level container for related data +```bash +sudo nomad agent -dev -network-interface=eth0 -log-level=DEBUG -bind=0.0.0.0 +``` - - Contains multiple Envelopes - - Has an associated ontology - - Includes access control information +## Project Setup -- **Envelope**: Individual data container - - Contains structured data - - Has a specific value type - - Linked to a MetaEnvelope +1. Install dependencies: -## Features +```bash +npm install +``` -### 1. Data Management +2. Build the project: -- Store and retrieve structured data -- Update and delete data with version control -- Search and filter data by ontology and content +```bash +npm run build +``` -### 2. Access Control +3. Start the server: -- W3ID-based authentication -- Fine-grained access control lists -- Secure token-based operations +```bash +npm start +``` -### 3. Query Capabilities +For development with auto-reload: -- GraphQL-based querying -- Complex search operations -- Real-time data access +```bash +npm run dev +``` -### 4. Integration +## API Endpoints -- Seamless integration with W3ID -- Support for multiple data formats -- Extensible architecture +### Health Check -## API Documentation +``` +GET /health +``` -### GraphQL Operations +Returns the health status of the API. -#### Queries +### Provision Evault -- `getMetaEnvelopeById`: Retrieve a specific MetaEnvelope -- `findMetaEnvelopesByOntology`: Find envelopes by ontology -- `searchMetaEnvelopes`: Search envelopes by content -- `getAllEnvelopes`: List all available envelopes +``` +POST /provision +``` -#### Mutations +Provisions a new evault instance for a tenant. -- `storeMetaEnvelope`: Create a new MetaEnvelope -- `deleteMetaEnvelope`: Remove a MetaEnvelope -- `updateEnvelopeValue`: Update envelope content +Request body: -### HTTP Endpoints +```json +{ + "tenantId": "your-tenant-id" +} +``` -- `/graphql`: GraphQL API endpoint -- `/voyager`: GraphQL schema visualization -- `/documentation`: API documentation +Response: -## Getting Started +```json +{ + "success": true, + "message": "Successfully provisioned evault for tenant your-tenant-id", + "jobName": "evault-your-tenant-id" +} +``` -### Prerequisites +## Architecture -- Node.js -- Neo4j database -- W3ID system +The provisioner creates a Nomad job that consists of two tasks: -### Installation +1. **Neo4j Task**: -1. Clone the repository -2. Install dependencies: - ```bash - npm install - ``` -3. Configure environment variables: - ``` - NEO4J_URI=bolt://localhost:7687 - NEO4J_USER=neo4j - NEO4J_PASSWORD=your_password - PORT=4000 - ``` -4. 
Start the server: - ```bash - npm start - ``` + - Runs Neo4j 5.15 + - Exposes ports: 7687 (bolt) and 7474 (browser) + - Uses dynamic ports for flexibility + - 2GB memory allocation -## Security Considerations +2. **Evault Task**: + - Runs the evault application + - Connects to Neo4j via localhost + - Uses dynamic port allocation + - 512MB memory allocation + - Depends on Neo4j task -- All operations require W3ID authentication -- Access control is enforced at both API and database levels -- Data is encrypted in transit and at rest -- Regular security audits and updates +## Environment Variables -## Integration Guide +- `PORT` - Port to run the API on (default: 3000) +- `NOMAD_ADDR` - Nomad API address (default: http://localhost:4646) -### W3ID Integration +## Troubleshooting -eVault uses W3ID for authentication and access control: +### Common Issues -1. Obtain a W3ID token -2. Include token in Authorization header -3. Access eVault resources based on permissions +1. **Port Allocation Issues**: -### Data Storage + - Ensure Nomad is running with CNI plugins installed + - Check that the network interface is correctly specified + - Verify that ports are not already in use -1. Define data ontology -2. Create MetaEnvelope with appropriate ACL -3. Store and manage data through the API +2. **Container Networking**: -## Development + - Ensure Docker is running + - Check that the bridge network is properly configured + - Verify container-to-container communication -### Testing +3. **Nomad Job Failures**: + - Check Nomad logs for detailed error messages + - Verify that all required images are available + - Ensure resource allocations are sufficient -```bash -npm test -``` +### Debugging -### Documentation +To debug Nomad issues: -- API documentation available at `/documentation` -- GraphQL schema visualization at `/voyager` -- Example queries in `src/protocol/examples` +```bash +# View Nomad logs +journalctl -u nomad -f -## Contributing +# Check Nomad status +nomad status -1. Fork the repository -2. Create feature branch -3. Submit pull request +# View specific job details +nomad job status evault- -## License +# View allocation details +nomad alloc status +``` -[License information] +## Development -## Support +The project uses TypeScript for type safety and better development experience. The source files are in the `src` directory and are compiled to the `dist` directory. -[Support information] +For development, you can use `npm run dev` which uses `tsx` to run the TypeScript files directly without compilation. diff --git a/infrastructure/evault-core/docs/w3id-integration.md b/infrastructure/evault-core/docs/w3id-integration.md deleted file mode 100644 index 0543afe6..00000000 --- a/infrastructure/evault-core/docs/w3id-integration.md +++ /dev/null @@ -1,303 +0,0 @@ -# W3ID Integration Documentation - -## Overview - -The eVault Core system integrates with W3ID (Web3 Identity) to provide decentralized identity verification and signature capabilities. This document outlines the technical implementation and functional aspects of the W3ID integration. - -## Technical Architecture - -### Components - -1. **W3ID Client** - - - Uses the `w3id` package for identity verification - - Handles JWT token validation and signature verification - - Manages identity claims and verification status - -2. **HTTP Endpoints** - - - Fastify-based REST API - - Swagger documentation available at `/docs` - - GraphQL integration for complex queries - -3. 
**Signature System** - - Decentralized signature verification - - Log-based signature tracking - - Multi-party signature support - -## API Endpoints - -### Identity Verification - -#### GET /whois - -Returns W3ID identity information and associated logs. - -**Request:** - -```http -GET /whois -Authorization: Bearer -``` - -**Response:** - -```json -{ - "w3id": { - "did": "did:example:123", - "verificationStatus": "verified", - "claims": { - "name": "John Doe", - "email": "john@example.com" - } - }, - "logs": [ - { - "timestamp": "2024-03-20T12:00:00Z", - "action": "identity_verification", - "status": "success" - } - ] -} -``` - -### Signature Management - -#### POST /watchers/sign - -Submit a signature for a specific log entry. - -**Request:** - -```http -POST /watchers/sign -Authorization: Bearer -Content-Type: application/json - -{ - "w3id": "did:example:123", - "signature": "0x1234...", - "logEntryId": "log_123" -} -``` - -**Response:** - -```json -{ - "success": true, - "message": "Signature stored successfully" -} -``` - -#### POST /watchers/request - -Request a signature for a log entry. - -**Request:** - -```http -POST /watchers/request -Authorization: Bearer -Content-Type: application/json - -{ - "w3id": "did:example:123", - "logEntryId": "log_123" -} -``` - -**Response:** - -```json -{ - "success": true, - "message": "Signature request created", - "requestId": "req_1234567890" -} -``` - -## Functional Documentation - -### Identity Verification Flow - -1. **Initial Verification** - - - User presents W3ID JWT token - - System validates token and extracts identity claims - - Identity status is logged in the system - -2. **Signature Request Process** - - - User requests signature for a log entry - - System verifies user's identity and permissions - - Signature request is created and tracked - -3. **Signature Submission** - - User submits signature for requested log entry - - System validates signature against W3ID - - Signature is recorded in the log - -### Security Considerations - -1. **Token Validation** - - - All endpoints require valid W3ID JWT tokens - - Token expiration is enforced - - Token claims are verified against system requirements - -2. **Signature Security** - - - Signatures are cryptographically verified - - Each signature is tied to a specific identity - - Signature requests are tracked and validated - -3. **Log Integrity** - - All actions are logged with timestamps - - Log entries are immutable once signed - - Multi-party verification is supported - -## Integration Guide - -### Prerequisites - -1. W3ID JWT token generation -2. Access to the eVault Core system -3. Proper permissions for signature operations - -### Implementation Steps - -1. **Identity Setup** - - ```typescript - import { W3ID } from "w3id"; - - const w3id = new W3ID({ - // Configuration options - }); - ``` - -2. **Token Generation** - - ```typescript - const token = await w3id.generateToken({ - claims: { - // Identity claims - }, - }); - ``` - -3. **API Integration** - ```typescript - // Example API call with W3ID token - const response = await fetch("/whois", { - headers: { - Authorization: `Bearer ${token}`, - }, - }); - ``` - -## Error Handling - -### Common Error Responses - -1. **Invalid Token** - - ```json - { - "error": "invalid_token", - "message": "Invalid or expired W3ID token" - } - ``` - -2. **Invalid Signature** - - ```json - { - "error": "invalid_signature", - "message": "Signature verification failed" - } - ``` - -3. 
**Permission Denied** - ```json - { - "error": "permission_denied", - "message": "Insufficient permissions for operation" - } - ``` - -## Monitoring and Logging - -### Log Structure - -```typescript -interface LogEntry { - timestamp: string; - action: - | "identity_verification" - | "signature_request" - | "signature_submission"; - status: "success" | "failure"; - details: { - w3id: string; - logEntryId?: string; - signature?: string; - error?: string; - }; -} -``` - -### Monitoring Endpoints - -1. **Identity Status** - - - Track verification attempts - - Monitor token usage - - Audit identity changes - -2. **Signature Tracking** - - Monitor signature requests - - Track signature submissions - - Audit signature verification - -## Best Practices - -1. **Token Management** - - - Rotate tokens regularly - - Use appropriate token scopes - - Implement proper token storage - -2. **Signature Handling** - - - Validate signatures immediately - - Maintain signature audit trail - - Implement proper error handling - -3. **Security** - - Use HTTPS for all communications - - Implement rate limiting - - Monitor for suspicious activity - -## Troubleshooting - -### Common Issues - -1. **Token Validation Failures** - - - Check token expiration - - Verify token claims - - Ensure proper token format - -2. **Signature Verification Issues** - - - Verify signature format - - Check identity permissions - - Validate log entry existence - -3. **API Integration Problems** - - Verify endpoint URLs - - Check request headers - - Validate response format diff --git a/infrastructure/evault-core/package.json b/infrastructure/evault-core/package.json index 85535a2a..68bdd57e 100644 --- a/infrastructure/evault-core/package.json +++ b/infrastructure/evault-core/package.json @@ -1,34 +1,26 @@ { "name": "evault-core", - "version": "0.1.0", - "description": "", - "main": "index.js", + "version": "1.0.0", + "description": "eVault core service with provisioning and multi-tenant GraphQL API", + "main": "dist/index.js", "scripts": { - "test": "vitest --config vitest.config.ts", + "start": "node dist/index.js", + "dev": "ts-node-dev --respawn --transpile-only src/index.ts", "build": "tsc", - "dev": "node --watch --import tsx src/evault.ts", - "start": "node ./dist/evault.js" - }, - "packageManager": "pnpm@10.6.5", - "keywords": [], - "author": "", - "license": "ISC", - "devDependencies": { - "@types/json-schema": "^7.0.15", - "@types/node": "^22.13.10", - "dotenv": "^16.5.0", - "testcontainers": "^10.24.2", - "tsx": "^4.19.3", - "typescript": "^5.8.3", - "uuid": "^11.1.0", - "vitest": "^3.0.9" + "test": "vitest", + "typeorm": "typeorm-ts-node-commonjs", + "migration:generate": "npm run typeorm migration:generate -- -d src/config/database.ts", + "migration:run": "npm run typeorm migration:run -- -d src/config/database.ts", + "migration:revert": "npm run typeorm migration:revert -- -d src/config/database.ts" }, "dependencies": { "@fastify/formbody": "^8.0.2", "@fastify/swagger": "^8.14.0", "@fastify/swagger-ui": "^3.0.0", - "@testcontainers/neo4j": "^10.24.2", "axios": "^1.6.7", + "cors": "^2.8.5", + "dotenv": "^16.4.5", + "express": "^4.18.2", "fastify": "^4.26.2", "graphql": "^16.10.0", "graphql-type-json": "^0.3.2", @@ -38,7 +30,26 @@ "json-schema": "^0.4.0", "multiformats": "^13.3.2", "neo4j-driver": "^5.28.1", + "pg": "^8.11.3", + "reflect-metadata": "^0.2.1", "tweetnacl": "^1.0.3", + "typeorm": "^0.3.24", + "uuid": "^13.0.0", "w3id": "workspace:*" + }, + "devDependencies": { + "@testcontainers/neo4j": "^10.0.1", + 
"@testcontainers/postgresql": "^10.0.1", + "@types/cors": "^2.8.18", + "@types/express": "^4.17.21", + "@types/json-schema": "^7.0.15", + "@types/node": "^20.11.24", + "@types/uuid": "^9.0.8", + "nodemon": "^3.0.3", + "ts-node-dev": "^2.0.0", + "tsx": "^4.7.1", + "typeorm-ts-node-commonjs": "^0.3.20", + "typescript": "^5.3.3", + "vitest": "^1.6.1" } -} +} \ No newline at end of file diff --git a/infrastructure/evault-provisioner/src/config/database.ts b/infrastructure/evault-core/src/config/database.ts similarity index 80% rename from infrastructure/evault-provisioner/src/config/database.ts rename to infrastructure/evault-core/src/config/database.ts index b423e032..f7d5a64c 100644 --- a/infrastructure/evault-provisioner/src/config/database.ts +++ b/infrastructure/evault-core/src/config/database.ts @@ -9,7 +9,7 @@ dotenv.config({ path: join(__dirname, "../../../../.env") }) export const AppDataSource = new DataSource({ type: "postgres", - url: process.env.PROVISIONER_DATABASE_URL || "postgresql://postgres:postgres@localhost:5432/provisioner", + url: process.env.REGISTRY_DATABASE_URL || process.env.PROVISIONER_DATABASE_URL || "postgresql://postgres:postgres@localhost:5432/registry", logging: process.env.NODE_ENV !== "production", entities: [Verification, Notification], migrations: [join(__dirname, "../migrations/*.{ts,js}")], diff --git a/infrastructure/evault-provisioner/src/controllers/NotificationController.ts b/infrastructure/evault-core/src/controllers/NotificationController.ts similarity index 100% rename from infrastructure/evault-provisioner/src/controllers/NotificationController.ts rename to infrastructure/evault-core/src/controllers/NotificationController.ts diff --git a/infrastructure/evault-core/src/controllers/ProvisioningController.ts b/infrastructure/evault-core/src/controllers/ProvisioningController.ts new file mode 100644 index 00000000..18585ceb --- /dev/null +++ b/infrastructure/evault-core/src/controllers/ProvisioningController.ts @@ -0,0 +1,34 @@ +import { Request, Response } from "express"; +import { ProvisioningService, ProvisionRequest, ProvisionResponse } from "../services/ProvisioningService"; + +export class ProvisioningController { + constructor(private readonly provisioningService: ProvisioningService) {} + + registerRoutes(app: any) { + app.post( + "/provision", + async ( + req: Request<{}, {}, ProvisionRequest>, + res: Response + ) => { + try { + const result = await this.provisioningService.provisionEVault(req.body); + + if (!result.success) { + return res.status(500).json(result); + } + + res.json(result); + } catch (error) { + console.error("Provisioning error:", error); + res.status(500).json({ + success: false, + error: error instanceof Error ? 
error.message : String(error), + message: "Failed to provision evault instance", + }); + } + } + ); + } +} + diff --git a/infrastructure/evault-provisioner/src/controllers/VerificationController.ts b/infrastructure/evault-core/src/controllers/VerificationController.ts similarity index 100% rename from infrastructure/evault-provisioner/src/controllers/VerificationController.ts rename to infrastructure/evault-core/src/controllers/VerificationController.ts diff --git a/infrastructure/evault-core/src/core/db/db.service.spec.ts b/infrastructure/evault-core/src/core/db/db.service.spec.ts new file mode 100644 index 00000000..eabc2969 --- /dev/null +++ b/infrastructure/evault-core/src/core/db/db.service.spec.ts @@ -0,0 +1,565 @@ +import neo4j, { Driver } from "neo4j-driver"; +import { DbService } from "./db.service"; // adjust if needed +import { it, describe, beforeAll, afterAll, expect } from "vitest"; +import { Neo4jContainer, StartedNeo4jContainer } from "@testcontainers/neo4j"; + +type Envelope = { + id: string; + ontology: string; + value: any; + valueType: string; +}; + +describe("DbService (integration)", () => { + let container: StartedNeo4jContainer; + let service: DbService; + let driver: Driver; + const TEST_ENAME = "test@example.com"; + + beforeAll(async () => { + container = await new Neo4jContainer("neo4j:5.15").start(); + + const username = container.getUsername(); + const password = container.getPassword(); + const boltPort = container.getMappedPort(7687); + const uri = `bolt://localhost:${boltPort}`; + + driver = neo4j.driver(uri, neo4j.auth.basic(username, password)); + service = new DbService(driver); + }, 120000); + + afterAll(async () => { + await service.close(); + await driver.close(); + await container.stop(); + }); + + it("should store and retrieve a meta-envelope with various data types", async () => { + const input = { + ontology: "TestTypes", + payload: { + string: "hello world", + number: 42, + boolean: true, + date: new Date("2025-04-10T00:00:00Z"), + array: [1, 2, 3], + object: { nested: { value: "deep" } }, + }, + acl: ["@test-user"], + }; + + const result = await service.storeMetaEnvelope(input, input.acl, TEST_ENAME); + const id = result.metaEnvelope.id; + + const fetched = await service.findMetaEnvelopeById(id, TEST_ENAME); + expect(fetched).toBeDefined(); + if (!fetched) return; + expect(fetched.id).toBeDefined(); + expect(fetched.ontology).toBe("TestTypes"); + expect(fetched.acl).toEqual(["@test-user"]); + expect(fetched.envelopes).toHaveLength(6); + + // Verify parsed field matches original payload + expect(fetched.parsed).toEqual(input.payload); + + // Verify each data type is properly stored and retrieved + const envelopes = fetched.envelopes.reduce( + (acc: Record, e: Envelope) => { + acc[e.ontology] = e; + return acc; + }, + {}, + ); + + expect(envelopes.string.value).toBe("hello world"); + expect(envelopes.string.valueType).toBe("string"); + + expect(envelopes.number.value).toBe(42); + expect(envelopes.number.valueType).toBe("number"); + + expect(envelopes.boolean.value).toBe(true); + expect(envelopes.boolean.valueType).toBe("boolean"); + + expect(envelopes.date.value).toBeInstanceOf(Date); + expect(envelopes.date.value.toISOString()).toBe( + "2025-04-10T00:00:00.000Z", + ); + expect(envelopes.date.valueType).toBe("date"); + + expect(envelopes.array.value).toEqual([1, 2, 3]); + expect(envelopes.array.valueType).toBe("array"); + + expect(envelopes.object.value).toEqual({ nested: { value: "deep" } }); + expect(envelopes.object.valueType).toBe("object"); + 
}); + + it("should find meta-envelopes containing the search term in any envelope value", async () => { + const input = { + ontology: "SocialMediaPost", + payload: { + text: "This is a searchable tweet", + image: "https://example.com/image.jpg", + likes: ["user1", "user2"], + }, + acl: ["@search-test-user"], + }; + + const metaEnv = await service.storeMetaEnvelope(input, input.acl, TEST_ENAME); + + const found = await service.findMetaEnvelopesBySearchTerm( + "SocialMediaPost", + "searchable", + TEST_ENAME, + ); + + expect(Array.isArray(found)).toBe(true); + const match = found.find((m) => m.id === metaEnv.metaEnvelope.id); + expect(match).toBeDefined(); + if (!match) throw new Error(); + expect(match.envelopes.length).toBeGreaterThan(0); + expect( + match.envelopes.some((e) => e.value.includes("searchable")), + ).toBe(true); + }); + + it("should return empty array if no values contain the search term", async () => { + const found = await service.findMetaEnvelopesBySearchTerm( + "SocialMediaPost", + "notfoundterm", + TEST_ENAME, + ); + expect(Array.isArray(found)).toBe(true); + expect(found.length).toBe(0); + }); + + it("should find meta-envelopes by ontology", async () => { + const results = + await service.findMetaEnvelopesByOntology("SocialMediaPost", TEST_ENAME); + expect(Array.isArray(results)).toBe(true); + expect(results.length).toBeGreaterThan(0); + }); + + it("should delete a meta-envelope and its envelopes", async () => { + const meta = { + ontology: "TempPost", + payload: { + value: "to be deleted", + }, + acl: ["@delete-user"], + }; + + const stored = await service.storeMetaEnvelope(meta, meta.acl, TEST_ENAME); + await service.deleteMetaEnvelope(stored.metaEnvelope.id, TEST_ENAME); + + const deleted = await service.findMetaEnvelopeById( + stored.metaEnvelope.id, + TEST_ENAME, + ); + expect(deleted).toBeNull(); + }); + + it("should update envelope value with proper type handling", async () => { + const meta = { + ontology: "UpdateTest", + payload: { + value: "original", + }, + acl: ["@updater"], + }; + + const stored = await service.storeMetaEnvelope(meta, meta.acl, TEST_ENAME); + + const result = await service.findMetaEnvelopeById( + stored.metaEnvelope.id, + TEST_ENAME, + ); + if (!result) return; + const targetEnvelope = result.envelopes.find( + (e: Envelope) => e.ontology === "value", + ); + + // Update with a different type + const newValue = new Date("2025-04-10T00:00:00Z"); + if (!targetEnvelope) return; + await service.updateEnvelopeValue(targetEnvelope.id, newValue, TEST_ENAME); + + const updated = await service.findMetaEnvelopeById( + stored.metaEnvelope.id, + TEST_ENAME, + ); + if (!updated) return; + const updatedValue = updated.envelopes.find( + (e: Envelope) => e.id === targetEnvelope.id, + ); + + if (!updatedValue) return; + expect(updatedValue.value).toBeInstanceOf(Date); + expect(updatedValue.value.toISOString()).toBe( + "2025-04-10T00:00:00.000Z", + ); + expect(updatedValue.valueType).toBe("date"); + }); + + it("should find meta-envelopes containing the search term in any value type", async () => { + const input = { + ontology: "SearchTest", + payload: { + string: "This is a searchable string", + array: ["searchable", "array", "element"], + object: { text: "searchable object" }, + number: 42, + date: new Date("2025-04-10T00:00:00Z"), + }, + acl: ["@search-test-user"], + }; + + const metaEnv = await service.storeMetaEnvelope(input, input.acl, TEST_ENAME); + + // Test search in string + const foundInString = await service.findMetaEnvelopesBySearchTerm( + 
"SearchTest", + "searchable string", + TEST_ENAME, + ); + expect(foundInString.length).toBeGreaterThan(0); + expect(foundInString[0].id).toBe(metaEnv.metaEnvelope.id); + + // Test search in array + const foundInArray = await service.findMetaEnvelopesBySearchTerm( + "SearchTest", + "searchable", + TEST_ENAME, + ); + expect(foundInArray.length).toBeGreaterThan(0); + expect(foundInArray[0].id).toBe(metaEnv.metaEnvelope.id); + + // Test search in object + const foundInObject = await service.findMetaEnvelopesBySearchTerm( + "SearchTest", + "searchable object", + TEST_ENAME, + ); + expect(foundInObject.length).toBeGreaterThan(0); + expect(foundInObject[0].id).toBe(metaEnv.metaEnvelope.id); + }); + + it("should find meta-envelopes containing the search term with parsed payload", async () => { + const input = { + ontology: "SearchTestHeyyy", + payload: { + string: "This is a searchable string", + array: ["searchable", "array", "element"], + object: { text: "searchable object" }, + number: 42, + date: new Date("2025-04-10T00:00:00Z"), + }, + acl: ["@search-test-user"], + }; + + const metaEnv = await service.storeMetaEnvelope(input, input.acl, TEST_ENAME); + + // Test search in string + const foundInString = await service.findMetaEnvelopesBySearchTerm( + "SearchTestHeyyy", + "searchable string", + TEST_ENAME, + ); + expect(foundInString.length).toBeGreaterThan(0); + expect(foundInString[0].id).toBe(metaEnv.metaEnvelope.id); + + // Test search in array + const foundInArray = await service.findMetaEnvelopesBySearchTerm( + "SearchTestHeyyy", + "searchable", + TEST_ENAME, + ); + expect(foundInArray.length).toBeGreaterThan(0); + expect(foundInArray[0].id).toBe(metaEnv.metaEnvelope.id); + + // Test search in object + const foundInObject = await service.findMetaEnvelopesBySearchTerm( + "SearchTestHeyyy", + "searchable object", + TEST_ENAME, + ); + expect(foundInObject.length).toBeGreaterThan(0); + expect(foundInObject[0].id).toBe(metaEnv.metaEnvelope.id); + }); + + describe("eName isolation and data leak prevention", () => { + const TENANT1_ENAME = "tenant1@example.com"; + const TENANT2_ENAME = "tenant2@example.com"; + + it("should not return data from other tenants when querying by ID", async () => { + // Create meta-envelope for tenant1 + const tenant1Meta = await service.storeMetaEnvelope( + { + ontology: "Tenant1Secret", + payload: { secret: "tenant1-data" }, + acl: ["*"], + }, + ["*"], + TENANT1_ENAME + ); + + // Try to query tenant1's data with tenant2's eName + const result = await service.findMetaEnvelopeById( + tenant1Meta.metaEnvelope.id, + TENANT2_ENAME + ); + + // Should return null - data leak prevented! 
+ expect(result).toBeNull(); + }); + + it("should not return data from other tenants when querying by ontology", async () => { + // Create meta-envelopes for both tenants with same ontology + await service.storeMetaEnvelope( + { + ontology: "SharedOntology", + payload: { data: "tenant1-data" }, + acl: ["*"], + }, + ["*"], + TENANT1_ENAME + ); + + await service.storeMetaEnvelope( + { + ontology: "SharedOntology", + payload: { data: "tenant2-data" }, + acl: ["*"], + }, + ["*"], + TENANT2_ENAME + ); + + // Query tenant1's data + const tenant1Results = await service.findMetaEnvelopesByOntology( + "SharedOntology", + TENANT1_ENAME + ); + + // Query tenant2's data + const tenant2Results = await service.findMetaEnvelopesByOntology( + "SharedOntology", + TENANT2_ENAME + ); + + // Each tenant should only see their own data + expect(tenant1Results.length).toBe(1); + expect(tenant1Results[0].parsed.data).toBe("tenant1-data"); + + expect(tenant2Results.length).toBe(1); + expect(tenant2Results[0].parsed.data).toBe("tenant2-data"); + + // Verify no cross-contamination + const tenant1HasTenant2Data = tenant1Results.some( + (r) => r.parsed.data === "tenant2-data" + ); + const tenant2HasTenant1Data = tenant2Results.some( + (r) => r.parsed.data === "tenant1-data" + ); + + expect(tenant1HasTenant2Data).toBe(false); + expect(tenant2HasTenant1Data).toBe(false); + }); + + it("should not return data from other tenants when searching", async () => { + // Create meta-envelopes for both tenants with searchable content + await service.storeMetaEnvelope( + { + ontology: "Searchable", + payload: { text: "tenant1-searchable-text" }, + acl: ["*"], + }, + ["*"], + TENANT1_ENAME + ); + + await service.storeMetaEnvelope( + { + ontology: "Searchable", + payload: { text: "tenant2-searchable-text" }, + acl: ["*"], + }, + ["*"], + TENANT2_ENAME + ); + + // Search tenant1's data + const tenant1Results = await service.findMetaEnvelopesBySearchTerm( + "Searchable", + "searchable", + TENANT1_ENAME + ); + + // Search tenant2's data + const tenant2Results = await service.findMetaEnvelopesBySearchTerm( + "Searchable", + "searchable", + TENANT2_ENAME + ); + + // Each tenant should only see their own results + expect(tenant1Results.length).toBe(1); + expect(tenant1Results[0].parsed.text).toBe("tenant1-searchable-text"); + + expect(tenant2Results.length).toBe(1); + expect(tenant2Results[0].parsed.text).toBe("tenant2-searchable-text"); + + // Verify no cross-contamination + const tenant1HasTenant2Data = tenant1Results.some( + (r) => r.parsed.text === "tenant2-searchable-text" + ); + const tenant2HasTenant1Data = tenant2Results.some( + (r) => r.parsed.text === "tenant1-searchable-text" + ); + + expect(tenant1HasTenant2Data).toBe(false); + expect(tenant2HasTenant1Data).toBe(false); + }); + + it("should not allow deletion of other tenants' data", async () => { + // Create meta-envelope for tenant1 + const tenant1Meta = await service.storeMetaEnvelope( + { + ontology: "Tenant1Data", + payload: { data: "tenant1-data" }, + acl: ["*"], + }, + ["*"], + TENANT1_ENAME + ); + + // Try to delete tenant1's data using tenant2's eName + await service.deleteMetaEnvelope( + tenant1Meta.metaEnvelope.id, + TENANT2_ENAME + ); + + // Data should still exist (deletion with wrong eName should be a no-op) + const stillExists = await service.findMetaEnvelopeById( + tenant1Meta.metaEnvelope.id, + TENANT1_ENAME + ); + + expect(stillExists).not.toBeNull(); + expect(stillExists?.parsed.data).toBe("tenant1-data"); + }); + + it("should not allow updating other 
tenants' envelope values", async () => { + // Create meta-envelope for tenant1 + const tenant1Meta = await service.storeMetaEnvelope( + { + ontology: "Tenant1Data", + payload: { value: "original-value" }, + acl: ["*"], + }, + ["*"], + TENANT1_ENAME + ); + + const envelopeId = tenant1Meta.envelopes[0].id; + + // Try to update tenant1's envelope using tenant2's eName + await service.updateEnvelopeValue( + envelopeId, + "hacked-value", + TENANT2_ENAME + ); + + // Value should remain unchanged (update with wrong eName should be a no-op) + const stillOriginal = await service.findMetaEnvelopeById( + tenant1Meta.metaEnvelope.id, + TENANT1_ENAME + ); + + expect(stillOriginal).not.toBeNull(); + expect(stillOriginal?.parsed.value).toBe("original-value"); + }); + + it("should not return data from other tenants when querying by multiple IDs", async () => { + // Create meta-envelopes for both tenants + const tenant1Meta1 = await service.storeMetaEnvelope( + { + ontology: "Tenant1Data", + payload: { data: "tenant1-data-1" }, + acl: ["*"], + }, + ["*"], + TENANT1_ENAME + ); + + const tenant1Meta2 = await service.storeMetaEnvelope( + { + ontology: "Tenant1Data", + payload: { data: "tenant1-data-2" }, + acl: ["*"], + }, + ["*"], + TENANT1_ENAME + ); + + const tenant2Meta = await service.storeMetaEnvelope( + { + ontology: "Tenant2Data", + payload: { data: "tenant2-data" }, + acl: ["*"], + }, + ["*"], + TENANT2_ENAME + ); + + // Try to query tenant1's IDs with tenant2's eName + const results = await service.findMetaEnvelopesByIds( + [tenant1Meta1.metaEnvelope.id, tenant1Meta2.metaEnvelope.id], + TENANT2_ENAME + ); + + // Should return empty array - data leak prevented! + expect(results.length).toBe(0); + + // Verify tenant1 can still access their own data + const tenant1Results = await service.findMetaEnvelopesByIds( + [tenant1Meta1.metaEnvelope.id, tenant1Meta2.metaEnvelope.id], + TENANT1_ENAME + ); + + expect(tenant1Results.length).toBe(2); + }); + + it("should throw error when eName is missing", async () => { + const metaEnvelope = await service.storeMetaEnvelope( + { + ontology: "Test", + payload: { data: "test" }, + acl: ["*"], + }, + ["*"], + TENANT1_ENAME + ); + + // All methods should throw when eName is missing + await expect( + service.findMetaEnvelopeById(metaEnvelope.metaEnvelope.id, "") + ).rejects.toThrow("eName is required"); + + await expect( + service.findMetaEnvelopesByOntology("Test", "") + ).rejects.toThrow("eName is required"); + + await expect( + service.findMetaEnvelopesBySearchTerm("Test", "test", "") + ).rejects.toThrow("eName is required"); + + await expect( + service.deleteMetaEnvelope(metaEnvelope.metaEnvelope.id, "") + ).rejects.toThrow("eName is required"); + + await expect( + service.updateEnvelopeValue(metaEnvelope.envelopes[0].id, "new", "") + ).rejects.toThrow("eName is required"); + }); + }); +}); diff --git a/infrastructure/evault-core/src/db/db.service.ts b/infrastructure/evault-core/src/core/db/db.service.ts similarity index 82% rename from infrastructure/evault-core/src/db/db.service.ts rename to infrastructure/evault-core/src/core/db/db.service.ts index 954e3dd1..b30cf595 100644 --- a/infrastructure/evault-core/src/db/db.service.ts +++ b/infrastructure/evault-core/src/core/db/db.service.ts @@ -44,6 +44,7 @@ export class DbService { * Stores a new meta-envelope and its associated envelopes. 
* @param meta - The meta-envelope data (without ID) * @param acl - The access control list for the meta-envelope + * @param eName - The eName identifier for multi-tenant isolation * @returns The created meta-envelope and its envelopes */ async storeMetaEnvelope< @@ -51,17 +52,23 @@ export class DbService { >( meta: Omit, "id">, acl: string[], + eName: string, ): Promise> { + if (!eName) { + throw new Error("eName is required for storing meta-envelopes"); + } + const w3id = await new W3IDBuilder().build(); const cypher: string[] = [ - `CREATE (m:MetaEnvelope { id: $metaId, ontology: $ontology, acl: $acl })`, + `CREATE (m:MetaEnvelope { id: $metaId, ontology: $ontology, acl: $acl, eName: $eName })`, ]; const envelopeParams: Record = { metaId: w3id.id, ontology: meta.ontology, acl: acl, + eName: eName, }; const createdEnvelopes: Envelope[] = []; @@ -118,6 +125,7 @@ export class DbService { * Returns all envelopes from the matched meta-envelopes. * @param ontology - The ontology to search within * @param searchTerm - The term to search for + * @param eName - The eName identifier for multi-tenant isolation * @returns Array of matched meta-envelopes with their complete envelope sets */ async findMetaEnvelopesBySearchTerm< @@ -125,10 +133,15 @@ export class DbService { >( ontology: string, searchTerm: string, + eName: string, ): Promise> { + if (!eName) { + throw new Error("eName is required for searching meta-envelopes"); + } + const result = await this.runQuery( ` - MATCH (m:MetaEnvelope { ontology: $ontology })-[:LINKS_TO]->(e:Envelope) + MATCH (m:MetaEnvelope { ontology: $ontology, eName: $eName })-[:LINKS_TO]->(e:Envelope) WHERE CASE e.valueType WHEN 'string' THEN toLower(e.value) CONTAINS toLower($term) @@ -140,7 +153,7 @@ export class DbService { MATCH (m)-[:LINKS_TO]->(allEnvelopes:Envelope) RETURN m.id AS id, m.ontology AS ontology, m.acl AS acl, collect(allEnvelopes) AS envelopes `, - { ontology, term: searchTerm }, + { ontology, term: searchTerm, eName }, ); return result.records.map((record): MetaEnvelopeResult => { @@ -180,20 +193,24 @@ export class DbService { /** * Finds multiple meta-envelopes by an array of IDs. * @param ids - Array of MetaEnvelope IDs + * @param eName - The eName identifier for multi-tenant isolation * @returns Array of meta-envelopes with envelopes and parsed payload */ async findMetaEnvelopesByIds< T extends Record = Record, - >(ids: string[]): Promise[]> { + >(ids: string[], eName: string): Promise[]> { if (!ids.length) return []; + if (!eName) { + throw new Error("eName is required for finding meta-envelopes by IDs"); + } const result = await this.runQuery( ` - MATCH (m:MetaEnvelope)-[:LINKS_TO]->(e:Envelope) + MATCH (m:MetaEnvelope { eName: $eName })-[:LINKS_TO]->(e:Envelope) WHERE m.id IN $ids RETURN m.id AS id, m.ontology AS ontology, m.acl AS acl, collect(e) AS envelopes `, - { ids }, + { ids, eName }, ); return result.records.map((record): MetaEnvelopeResult => { @@ -233,17 +250,22 @@ export class DbService { /** * Finds a meta-envelope by its ID. 
* @param id - The ID of the meta-envelope to find + * @param eName - The eName identifier for multi-tenant isolation * @returns The meta-envelope with all its envelopes and parsed payload, or null if not found */ async findMetaEnvelopeById< T extends Record = Record, - >(id: string): Promise | null> { + >(id: string, eName: string): Promise | null> { + if (!eName) { + throw new Error("eName is required for finding meta-envelopes by ID"); + } + const result = await this.runQuery( ` - MATCH (m:MetaEnvelope { id: $id })-[:LINKS_TO]->(e:Envelope) + MATCH (m:MetaEnvelope { id: $id, eName: $eName })-[:LINKS_TO]->(e:Envelope) RETURN m.id AS id, m.ontology AS ontology, m.acl AS acl, collect(e) AS envelopes `, - { id }, + { id, eName }, ); if (!result.records[0]) return null; @@ -284,17 +306,22 @@ export class DbService { /** * Finds all meta-envelopes by ontology with their envelopes and parsed payload. * @param ontology - The ontology to search for + * @param eName - The eName identifier for multi-tenant isolation * @returns Array of meta-envelopes */ async findMetaEnvelopesByOntology< T extends Record = Record, - >(ontology: string): Promise[]> { + >(ontology: string, eName: string): Promise[]> { + if (!eName) { + throw new Error("eName is required for finding meta-envelopes by ontology"); + } + const result = await this.runQuery( ` - MATCH (m:MetaEnvelope { ontology: $ontology })-[:LINKS_TO]->(e:Envelope) + MATCH (m:MetaEnvelope { ontology: $ontology, eName: $eName })-[:LINKS_TO]->(e:Envelope) RETURN m.id AS id, m.ontology AS ontology, m.acl AS acl, collect(e) AS envelopes `, - { ontology }, + { ontology, eName }, ); return result.records.map((record) => { @@ -334,14 +361,19 @@ export class DbService { /** * Deletes a meta-envelope and all its associated envelopes. * @param id - The ID of the meta-envelope to delete + * @param eName - The eName identifier for multi-tenant isolation */ - async deleteMetaEnvelope(id: string): Promise { + async deleteMetaEnvelope(id: string, eName: string): Promise { + if (!eName) { + throw new Error("eName is required for deleting meta-envelopes"); + } + await this.runQuery( ` - MATCH (m:MetaEnvelope { id: $id })-[:LINKS_TO]->(e:Envelope) + MATCH (m:MetaEnvelope { id: $id, eName: $eName })-[:LINKS_TO]->(e:Envelope) DETACH DELETE m, e `, - { id }, + { id, eName }, ); } @@ -349,20 +381,27 @@ export class DbService { * Updates the value of an envelope. 
* @param envelopeId - The ID of the envelope to update * @param newValue - The new value to set + * @param eName - The eName identifier for multi-tenant isolation */ async updateEnvelopeValue( envelopeId: string, newValue: T, + eName: string, ): Promise { + if (!eName) { + throw new Error("eName is required for updating envelope values"); + } + const { value: storedValue, type: valueType } = serializeValue(newValue); + // First verify the envelope belongs to a meta-envelope with the correct eName await this.runQuery( ` - MATCH (e:Envelope { id: $envelopeId }) + MATCH (m:MetaEnvelope { eName: $eName })-[:LINKS_TO]->(e:Envelope { id: $envelopeId }) SET e.value = $newValue, e.valueType = $valueType `, - { envelopeId, newValue: storedValue, valueType }, + { envelopeId, newValue: storedValue, valueType, eName }, ); } @@ -371,6 +410,7 @@ export class DbService { * @param id - The ID of the meta-envelope to update * @param meta - The updated meta-envelope data * @param acl - The updated access control list + * @param eName - The eName identifier for multi-tenant isolation * @returns The updated meta-envelope and its envelopes */ async updateMetaEnvelopeById< @@ -379,9 +419,14 @@ export class DbService { id: string, meta: Omit, "id">, acl: string[], + eName: string, ): Promise> { + if (!eName) { + throw new Error("eName is required for updating meta-envelopes"); + } + try { - let existing = await this.findMetaEnvelopeById(id); + let existing = await this.findMetaEnvelopeById(id, eName); if (!existing) { const metaW3id = await new W3IDBuilder().build(); await this.runQuery( @@ -389,21 +434,22 @@ export class DbService { CREATE (m:MetaEnvelope { id: $id, ontology: $ontology, - acl: $acl + acl: $acl, + eName: $eName }) `, - { id, ontology: meta.ontology, acl } + { id, ontology: meta.ontology, acl, eName } ); existing = { id, ontology: meta.ontology, acl, parsed: meta.payload, envelopes: [] }; } - // Update the meta-envelope properties + // Update the meta-envelope properties (ensure eName matches) await this.runQuery( ` - MATCH (m:MetaEnvelope { id: $id }) + MATCH (m:MetaEnvelope { id: $id, eName: $eName }) SET m.ontology = $ontology, m.acl = $acl `, - { id, ontology: meta.ontology, acl } + { id, ontology: meta.ontology, acl, eName } ); const createdEnvelopes: Envelope[] = []; @@ -445,7 +491,7 @@ export class DbService { await this.runQuery( ` - MATCH (m:MetaEnvelope { id: $metaId }) + MATCH (m:MetaEnvelope { id: $metaId, eName: $eName }) CREATE (${alias}:Envelope { id: $${alias}_id, ontology: $${alias}_ontology, @@ -457,6 +503,7 @@ export class DbService { `, { metaId: id, + eName: eName, [`${alias}_id`]: envelopeId, [`${alias}_ontology`]: key, [`${alias}_value`]: storedValue, @@ -515,11 +562,19 @@ export class DbService { } /** - * Retrieves all envelopes in the system. - * @returns Array of all envelopes + * Retrieves all envelopes for a specific eName. 
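Taken together, every DbService read and write is now scoped by eName. A minimal usage sketch of the new signatures (connection values, import paths, and payloads are illustrative; the result shapes are the ones used by the tests in this diff):

```typescript
import neo4j from "neo4j-driver";
import { DbService } from "./db.service";

// Illustrative connection values; the real ones come from the NEO4J_* env vars.
const driver = neo4j.driver(
    "bolt://localhost:7687",
    neo4j.auth.basic("neo4j", "neo4j"),
);
const db = new DbService(driver);

async function demo() {
    const eName = "tenant1@example.com";

    // Every write is tagged with the caller's eName.
    const stored = await db.storeMetaEnvelope(
        { ontology: "SocialMediaPost", payload: { text: "hello" }, acl: ["*"] },
        ["*"],
        eName,
    );

    // Reads only see data stored under the same eName...
    const own = await db.findMetaEnvelopeById(stored.metaEnvelope.id, eName);
    console.log(own?.parsed); // { text: "hello" }

    // ...so the same ID resolves to null for every other tenant.
    const other = await db.findMetaEnvelopeById(
        stored.metaEnvelope.id,
        "tenant2@example.com",
    );
    console.log(other); // null
}

demo().catch(console.error).finally(() => driver.close());
```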
+ * @param eName - The eName identifier for multi-tenant isolation + * @returns Array of all envelopes for the given eName */ - async getAllEnvelopes(): Promise> { - const result = await this.runQuery(`MATCH (e:Envelope) RETURN e`, {}); + async getAllEnvelopes(eName: string): Promise> { + if (!eName) { + throw new Error("eName is required for getting all envelopes"); + } + + const result = await this.runQuery( + `MATCH (m:MetaEnvelope { eName: $eName })-[:LINKS_TO]->(e:Envelope) RETURN e`, + { eName } + ); return result.records.map((r): Envelope => { const node = r.get("e"); const properties = node.properties; diff --git a/infrastructure/evault-core/src/core/db/migrations/add-ename-index.ts b/infrastructure/evault-core/src/core/db/migrations/add-ename-index.ts new file mode 100644 index 00000000..b7e12909 --- /dev/null +++ b/infrastructure/evault-core/src/core/db/migrations/add-ename-index.ts @@ -0,0 +1,34 @@ +/** + * Neo4j Migration: Add eName index for multi-tenant performance + * + * This migration creates an index on the eName property of MetaEnvelope nodes + * to ensure optimal query performance for multi-tenant isolation. + * + * Run this migration once to create the index: + * ```cypher + * CREATE INDEX eName_index FOR (m:MetaEnvelope) ON (m.eName) + * ``` + */ + +import { Driver } from "neo4j-driver"; + +export async function createENameIndex(driver: Driver): Promise { + const session = driver.session(); + try { + await session.run( + `CREATE INDEX eName_index IF NOT EXISTS FOR (m:MetaEnvelope) ON (m.eName)` + ); + console.log("Created eName index on MetaEnvelope nodes"); + } catch (error) { + // Index might already exist, which is fine + if (error instanceof Error && error.message.includes("already exists")) { + console.log("eName index already exists"); + } else { + console.error("Error creating eName index:", error); + throw error; + } + } finally { + await session.close(); + } +} + diff --git a/infrastructure/evault-core/src/db/retry-neo4j.ts b/infrastructure/evault-core/src/core/db/retry-neo4j.ts similarity index 100% rename from infrastructure/evault-core/src/db/retry-neo4j.ts rename to infrastructure/evault-core/src/core/db/retry-neo4j.ts diff --git a/infrastructure/evault-core/src/db/schema.ts b/infrastructure/evault-core/src/core/db/schema.ts similarity index 100% rename from infrastructure/evault-core/src/db/schema.ts rename to infrastructure/evault-core/src/core/db/schema.ts diff --git a/infrastructure/evault-core/src/db/types.ts b/infrastructure/evault-core/src/core/db/types.ts similarity index 89% rename from infrastructure/evault-core/src/db/types.ts rename to infrastructure/evault-core/src/core/db/types.ts index b76f9808..050b1fd8 100644 --- a/infrastructure/evault-core/src/db/types.ts +++ b/infrastructure/evault-core/src/core/db/types.ts @@ -21,6 +21,7 @@ export type Envelope = { /** * Base result type for all database operations that return a meta-envelope. * Includes the parsed payload structure reconstructed from the envelopes. + * Note: eName is stored internally but not exposed in GraphQL responses. 
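The index creation is idempotent (`IF NOT EXISTS`), so it can be run on every startup; the new index.ts later in this diff does exactly that. A sketch of invoking the migration on its own, assuming the same Neo4j settings the service uses (the import path is illustrative):

```typescript
import neo4j from "neo4j-driver";
import { createENameIndex } from "./migrations/add-ename-index";

async function migrate() {
    const driver = neo4j.driver(
        process.env.NEO4J_URI || "bolt://localhost:7687",
        neo4j.auth.basic(
            process.env.NEO4J_USER || "neo4j",
            process.env.NEO4J_PASSWORD || "neo4j",
        ),
    );
    try {
        // Creates the eName index on MetaEnvelope nodes if it does not exist yet.
        await createENameIndex(driver);
    } finally {
        await driver.close();
    }
}

migrate().catch(console.error);
```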
*/ export type MetaEnvelopeResult< T extends Record = Record @@ -30,6 +31,8 @@ export type MetaEnvelopeResult< acl: string[]; envelopes: Envelope[]; parsed: T; + // eName is stored internally but never returned in API responses + eName?: string; }; /** diff --git a/infrastructure/evault-core/src/http/server.ts b/infrastructure/evault-core/src/core/http/server.ts similarity index 81% rename from infrastructure/evault-core/src/http/server.ts rename to infrastructure/evault-core/src/core/http/server.ts index 4bbda27f..b5f7f5c0 100644 --- a/infrastructure/evault-core/src/http/server.ts +++ b/infrastructure/evault-core/src/core/http/server.ts @@ -2,6 +2,7 @@ import fastify, { FastifyInstance } from "fastify"; import swagger from "@fastify/swagger"; import swaggerUi from "@fastify/swagger-ui"; import { WatcherRequest, TypedRequest, TypedReply } from "./types"; +import { ProvisioningService, ProvisionRequest } from "../../services/ProvisioningService"; interface WatcherSignatureRequest { w3id: string; @@ -15,7 +16,8 @@ interface WatcherSignatureRequest { export async function registerHttpRoutes( server: FastifyInstance, - evault: any // EVault instance to access publicKey + evault: any, // EVault instance to access publicKey + provisioningService?: ProvisioningService ): Promise { // Register Swagger await server.register(swagger, { @@ -31,6 +33,10 @@ export async function registerHttpRoutes( name: "watchers", description: "Watcher signature related endpoints", }, + { + name: "provisioning", + description: "eVault provisioning endpoints", + }, ], }, }); @@ -250,4 +256,46 @@ export async function registerHttpRoutes( } ); */ + + // Provision eVault endpoint + if (provisioningService) { + server.post<{ Body: ProvisionRequest }>( + "/provision", + { + schema: { + tags: ["provisioning"], + description: "Provision a new eVault instance (logical only, no infrastructure)", + body: { + type: "object", + required: ["registryEntropy", "namespace", "verificationId", "publicKey"], + properties: { + registryEntropy: { type: "string" }, + namespace: { type: "string" }, + verificationId: { type: "string" }, + publicKey: { type: "string" }, + }, + }, + response: { + 200: { + type: "object", + properties: { + success: { type: "boolean" }, + w3id: { type: "string" }, + uri: { type: "string" }, + message: { type: "string" }, + error: { type: "string" }, + }, + }, + }, + }, + }, + async (request: TypedRequest, reply: TypedReply) => { + const result = await provisioningService.provisionEVault(request.body); + if (!result.success) { + return reply.status(500).send(result); + } + return result; + } + ); + } } diff --git a/infrastructure/evault-core/src/http/types.ts b/infrastructure/evault-core/src/core/http/types.ts similarity index 100% rename from infrastructure/evault-core/src/http/types.ts rename to infrastructure/evault-core/src/core/http/types.ts diff --git a/infrastructure/evault-core/src/protocol/examples/examples.ts b/infrastructure/evault-core/src/core/protocol/examples/examples.ts similarity index 100% rename from infrastructure/evault-core/src/protocol/examples/examples.ts rename to infrastructure/evault-core/src/core/protocol/examples/examples.ts diff --git a/infrastructure/evault-core/src/protocol/graphql-server.ts b/infrastructure/evault-core/src/core/protocol/graphql-server.ts similarity index 83% rename from infrastructure/evault-core/src/protocol/graphql-server.ts rename to infrastructure/evault-core/src/core/protocol/graphql-server.ts index c1336672..ad851a0f 100644 --- 
a/infrastructure/evault-core/src/protocol/graphql-server.ts +++ b/infrastructure/evault-core/src/core/protocol/graphql-server.ts @@ -129,28 +129,42 @@ export class GraphQLServer { Query: { getMetaEnvelopeById: this.accessGuard.middleware( - (_: any, { id }: { id: string }) => { - return this.db.findMetaEnvelopeById(id); + (_: any, { id }: { id: string }, context: VaultContext) => { + if (!context.eName) { + throw new Error("X-ENAME header is required"); + } + return this.db.findMetaEnvelopeById(id, context.eName); } ), findMetaEnvelopesByOntology: this.accessGuard.middleware( - (_: any, { ontology }: { ontology: string }) => { - return this.db.findMetaEnvelopesByOntology(ontology); + (_: any, { ontology }: { ontology: string }, context: VaultContext) => { + if (!context.eName) { + throw new Error("X-ENAME header is required"); + } + return this.db.findMetaEnvelopesByOntology(ontology, context.eName); } ), searchMetaEnvelopes: this.accessGuard.middleware( ( _: any, - { ontology, term }: { ontology: string; term: string } + { ontology, term }: { ontology: string; term: string }, + context: VaultContext ) => { + if (!context.eName) { + throw new Error("X-ENAME header is required"); + } return this.db.findMetaEnvelopesBySearchTerm( ontology, - term + term, + context.eName ); } ), - getAllEnvelopes: this.accessGuard.middleware(() => { - return this.db.getAllEnvelopes(); + getAllEnvelopes: this.accessGuard.middleware((_: any, __: any, context: VaultContext) => { + if (!context.eName) { + throw new Error("X-ENAME header is required"); + } + return this.db.getAllEnvelopes(context.eName); }), }, @@ -169,13 +183,17 @@ export class GraphQLServer { }, context: VaultContext ) => { + if (!context.eName) { + throw new Error("X-ENAME header is required"); + } const result = await this.db.storeMetaEnvelope( { ontology: input.ontology, payload: input.payload, acl: input.acl, }, - input.acl + input.acl, + context.eName ); // Deliver webhooks for create operation @@ -225,6 +243,9 @@ export class GraphQLServer { }, context: VaultContext ) => { + if (!context.eName) { + throw new Error("X-ENAME header is required"); + } try { const result = await this.db.updateMetaEnvelopeById( id, @@ -233,7 +254,8 @@ export class GraphQLServer { payload: input.payload, acl: input.acl, }, - input.acl + input.acl, + context.eName ); // Deliver webhooks for update operation @@ -264,8 +286,11 @@ export class GraphQLServer { } ), deleteMetaEnvelope: this.accessGuard.middleware( - async (_: any, { id }: { id: string }) => { - await this.db.deleteMetaEnvelope(id); + async (_: any, { id }: { id: string }, context: VaultContext) => { + if (!context.eName) { + throw new Error("X-ENAME header is required"); + } + await this.db.deleteMetaEnvelope(id, context.eName); return true; } ), @@ -275,9 +300,13 @@ export class GraphQLServer { { envelopeId, newValue, - }: { envelopeId: string; newValue: any } + }: { envelopeId: string; newValue: any }, + context: VaultContext ) => { - await this.db.updateEnvelopeValue(envelopeId, newValue); + if (!context.eName) { + throw new Error("X-ENAME header is required"); + } + await this.db.updateEnvelopeValue(envelopeId, newValue, context.eName); return true; } ), @@ -298,16 +327,19 @@ export class GraphQLServer { context: async ({ request }) => { const authHeader = request.headers.get("authorization") ?? ""; const token = authHeader.replace("Bearer ", ""); + const eName = request.headers.get("x-ename") ?? request.headers.get("X-ENAME") ?? 
null; if (token) { const id = getJWTHeader(token).kid?.split("#")[0]; return { currentUser: id ?? null, + eName: eName, }; } return { currentUser: null, + eName: eName, }; }, }); diff --git a/infrastructure/evault-core/src/protocol/typedefs.ts b/infrastructure/evault-core/src/core/protocol/typedefs.ts similarity index 100% rename from infrastructure/evault-core/src/protocol/typedefs.ts rename to infrastructure/evault-core/src/core/protocol/typedefs.ts diff --git a/infrastructure/evault-core/src/core/protocol/vault-access-guard.spec.ts b/infrastructure/evault-core/src/core/protocol/vault-access-guard.spec.ts new file mode 100644 index 00000000..ff3fa64a --- /dev/null +++ b/infrastructure/evault-core/src/core/protocol/vault-access-guard.spec.ts @@ -0,0 +1,430 @@ +import { describe, it, expect, beforeAll, afterAll, beforeEach, vi } from "vitest"; +import { VaultAccessGuard, VaultContext } from "./vault-access-guard"; +import { DbService } from "../db/db.service"; +import { setupTestNeo4j, teardownTestNeo4j } from "../../test-utils/neo4j-setup"; +import { Driver } from "neo4j-driver"; +import axios from "axios"; +import * as jose from "jose"; +import { SignJWT, generateKeyPair, exportJWK } from "jose"; + +vi.mock("axios"); +const mockedAxios = axios as any; + +describe("VaultAccessGuard", () => { + let driver: Driver; + let dbService: DbService; + let guard: VaultAccessGuard; + let testPrivateKey: any; + let testPublicKey: any; + let testJWK: any; + + beforeAll(async () => { + const setup = await setupTestNeo4j(); + driver = setup.driver; + dbService = new DbService(driver); + guard = new VaultAccessGuard(dbService); + + // Generate test keys for JWT signing + const { publicKey, privateKey } = await generateKeyPair("ES256", { + extractable: true, + }); + + testPrivateKey = privateKey; + testPublicKey = publicKey; + testJWK = await exportJWK(privateKey); + testJWK.kid = "entropy-key-1"; + testJWK.alg = "ES256"; + testJWK.use = "sig"; + + process.env.REGISTRY_URL = "http://localhost:4322"; + }, 120000); + + afterAll(async () => { + await teardownTestNeo4j(); + delete process.env.REGISTRY_URL; + }); + + beforeEach(() => { + vi.clearAllMocks(); + + // Mock JWKS endpoint + mockedAxios.get.mockResolvedValue({ + data: { + keys: [{ ...testJWK, d: undefined }], // Public key only + }, + }); + }); + + const createMockContext = (overrides: Partial = {}): VaultContext => { + const mockRequest = { + headers: new Headers(overrides.request?.headers || {}), + } as any; + return { + request: mockRequest, + currentUser: overrides.currentUser || null, + eName: overrides.eName || null, + ...overrides, + } as VaultContext; + }; + + const createValidToken = async (payload: any = {}): Promise => { + return await new SignJWT(payload) + .setProtectedHeader({ alg: "ES256", kid: "entropy-key-1" }) + .setIssuedAt() + .setExpirationTime("1h") + .sign(testPrivateKey); + }; + + describe("validateToken", () => { + it("should validate valid JWT token", async () => { + const token = await createValidToken({ platform: "test-platform" }); + const context = createMockContext({ + request: { + headers: new Headers({ + authorization: `Bearer ${token}`, + }), + } as any, + }); + + // Access private method through reflection for testing + const validateToken = (guard as any).validateToken.bind(guard); + const result = await validateToken(`Bearer ${token}`); + + expect(result).toBeDefined(); + expect(result.platform).toBe("test-platform"); + }); + + it("should return null for invalid token", async () => { + const validateToken = (guard 
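With these resolver changes every GraphQL call must carry the tenant's X-ENAME header, and usually a Bearer token for the ACL check. A hedged client-side sketch: the port matches the Fastify default used elsewhere in this diff, the token is a placeholder, and the selection set is a guess since the typedefs are not part of this change:

```typescript
// Illustrative GraphQL request against the multi-tenant resolvers.
async function fetchMetaEnvelope(id: string, eName: string, token: string) {
    const res = await fetch("http://localhost:4000/graphql", {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            Authorization: `Bearer ${token}`,
            // Omitting this header makes the resolver throw "X-ENAME header is required".
            "X-ENAME": eName,
        },
        body: JSON.stringify({
            query: `{ getMetaEnvelopeById(id: "${id}") { id ontology } }`,
        }),
    });
    return res.json();
}
```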
as any).validateToken.bind(guard); + const result = await validateToken("Bearer invalid-token"); + + expect(result).toBeNull(); + }); + + it("should return null for missing token", async () => { + const validateToken = (guard as any).validateToken.bind(guard); + const result = await validateToken(null); + + expect(result).toBeNull(); + }); + + it("should return null when REGISTRY_URL is not set", async () => { + const originalUrl = process.env.REGISTRY_URL; + delete process.env.REGISTRY_URL; + + const validateToken = (guard as any).validateToken.bind(guard); + const result = await validateToken("Bearer token"); + + expect(result).toBeNull(); + + process.env.REGISTRY_URL = originalUrl; + }); + }); + + describe("checkAccess", () => { + it("should allow access with valid token", async () => { + const token = await createValidToken({ platform: "test-platform" }); + const context = createMockContext({ + request: { + headers: new Headers({ + authorization: `Bearer ${token}`, + }), + } as any, + eName: "test@example.com", + }); + + const checkAccess = (guard as any).checkAccess.bind(guard); + const result = await checkAccess("meta-envelope-id", context); + + expect(result).toBe(true); + expect(context.tokenPayload).toBeDefined(); + }); + + it("should allow access with ACL '*'", async () => { + const eName = "test@example.com"; + const metaEnvelope = await dbService.storeMetaEnvelope( + { + ontology: "Test", + payload: { field: "value" }, + acl: ["*"], + }, + ["*"], + eName + ); + + const context = createMockContext({ + eName, + currentUser: "user-123", + }); + + const checkAccess = (guard as any).checkAccess.bind(guard); + const result = await checkAccess(metaEnvelope.metaEnvelope.id, context); + + expect(result).toBe(true); + }); + + it("should allow access when user is in ACL", async () => { + const eName = "test@example.com"; + const metaEnvelope = await dbService.storeMetaEnvelope( + { + ontology: "Test", + payload: { field: "value" }, + acl: ["user-123"], + }, + ["user-123"], + eName + ); + + const context = createMockContext({ + eName, + currentUser: "user-123", + }); + + const checkAccess = (guard as any).checkAccess.bind(guard); + const result = await checkAccess(metaEnvelope.metaEnvelope.id, context); + + expect(result).toBe(true); + }); + + it("should deny access when user is not in ACL", async () => { + const eName = "test@example.com"; + const metaEnvelope = await dbService.storeMetaEnvelope( + { + ontology: "Test", + payload: { field: "value" }, + acl: ["other-user"], + }, + ["other-user"], + eName + ); + + const context = createMockContext({ + eName, + currentUser: "user-123", + }); + + const checkAccess = (guard as any).checkAccess.bind(guard); + const result = await checkAccess(metaEnvelope.metaEnvelope.id, context); + + expect(result).toBe(false); + }); + + it("should throw error when eName header is missing", async () => { + const context = createMockContext({ + currentUser: "user-123", + // eName is null + }); + + const checkAccess = (guard as any).checkAccess.bind(guard); + + await expect(checkAccess("meta-envelope-id", context)).rejects.toThrow( + "X-ENAME header is required" + ); + }); + + it("should prevent access to meta-envelopes from different eName (data leak prevention)", async () => { + const eName1 = "tenant1@example.com"; + const eName2 = "tenant2@example.com"; + + // Create meta-envelope for tenant1 + const metaEnvelope = await dbService.storeMetaEnvelope( + { + ontology: "SecretData", + payload: { secret: "tenant1-secret-value" }, + acl: ["*"], // Public ACL + }, 
+ ["*"], // Public ACL + eName1 + ); + + // Try to access tenant1's data with tenant2's eName + const context = createMockContext({ + eName: eName2, // Different eName! + currentUser: "user-123", + }); + + const checkAccess = (guard as any).checkAccess.bind(guard); + + // Should return false because the meta-envelope won't be found with eName2 + const result = await checkAccess(metaEnvelope.metaEnvelope.id, context); + expect(result).toBe(false); + }); + + it("should allow access only to meta-envelopes matching the provided eName", async () => { + const eName1 = "tenant1@example.com"; + const eName2 = "tenant2@example.com"; + + // Create meta-envelopes for both tenants + const metaEnvelope1 = await dbService.storeMetaEnvelope( + { + ontology: "Tenant1Data", + payload: { data: "tenant1-data" }, + acl: ["*"], + }, + ["*"], + eName1 + ); + + const metaEnvelope2 = await dbService.storeMetaEnvelope( + { + ontology: "Tenant2Data", + payload: { data: "tenant2-data" }, + acl: ["*"], + }, + ["*"], + eName2 + ); + + // Tenant1 should only access their own data + const context1 = createMockContext({ + eName: eName1, + currentUser: "user-123", + }); + + const checkAccess = (guard as any).checkAccess.bind(guard); + + const result1 = await checkAccess(metaEnvelope1.metaEnvelope.id, context1); + expect(result1).toBe(true); + + // Tenant1 should NOT access tenant2's data + const result2 = await checkAccess(metaEnvelope2.metaEnvelope.id, context1); + expect(result2).toBe(false); + + // Tenant2 should only access their own data + const context2 = createMockContext({ + eName: eName2, + currentUser: "user-123", + }); + + const result3 = await checkAccess(metaEnvelope2.metaEnvelope.id, context2); + expect(result3).toBe(true); + + // Tenant2 should NOT access tenant1's data + const result4 = await checkAccess(metaEnvelope1.metaEnvelope.id, context2); + expect(result4).toBe(false); + }); + + it("should allow access with ACL '*' even without currentUser", async () => { + const eName = "test@example.com"; + const metaEnvelope = await dbService.storeMetaEnvelope( + { + ontology: "Test", + payload: { field: "value" }, + acl: ["*"], + }, + ["*"], + eName + ); + + const context = createMockContext({ + eName, + currentUser: null, + }); + + const checkAccess = (guard as any).checkAccess.bind(guard); + const result = await checkAccess(metaEnvelope.metaEnvelope.id, context); + + expect(result).toBe(true); + }); + }); + + describe("middleware", () => { + it("should filter ACL from responses", async () => { + const eName = "test@example.com"; + const metaEnvelope = await dbService.storeMetaEnvelope( + { + ontology: "Test", + payload: { field: "value" }, + acl: ["user-123"], + }, + ["user-123"], + eName + ); + + const context = createMockContext({ + eName, + currentUser: "user-123", + }); + + const mockResolver = vi.fn(async () => { + const result = await dbService.findMetaEnvelopeById( + metaEnvelope.metaEnvelope.id, + eName + ); + return result; + }); + + const wrappedResolver = guard.middleware(mockResolver); + const result = await wrappedResolver(null, { id: metaEnvelope.metaEnvelope.id }, context); + + expect(result).toBeDefined(); + expect(result.acl).toBeUndefined(); // ACL should be filtered + }); + + it("should throw error when access is denied", async () => { + const eName = "test@example.com"; + const metaEnvelope = await dbService.storeMetaEnvelope( + { + ontology: "Test", + payload: { field: "value" }, + acl: ["other-user"], + }, + ["other-user"], + eName + ); + + const context = createMockContext({ + eName, + 
currentUser: "user-123", + }); + + const mockResolver = vi.fn(async () => { + return await dbService.findMetaEnvelopeById(metaEnvelope.metaEnvelope.id, eName); + }); + + const wrappedResolver = guard.middleware(mockResolver); + + await expect( + wrappedResolver(null, { id: metaEnvelope.metaEnvelope.id }, context) + ).rejects.toThrow("Access denied"); + }); + + it("should prevent data leak when accessing with wrong eName in middleware", async () => { + const eName1 = "tenant1@example.com"; + const eName2 = "tenant2@example.com"; + + // Create meta-envelope for tenant1 + const metaEnvelope = await dbService.storeMetaEnvelope( + { + ontology: "SecretData", + payload: { secret: "tenant1-secret" }, + acl: ["*"], // Public ACL + }, + ["*"], // Public ACL + eName1 + ); + + // Try to access with tenant2's eName + const context = createMockContext({ + eName: eName2, // Wrong eName! + currentUser: "user-123", + }); + + const mockResolver = vi.fn(async () => { + // Resolver tries to fetch with the context's eName + return await dbService.findMetaEnvelopeById( + metaEnvelope.metaEnvelope.id, + eName2 // Using wrong eName + ); + }); + + const wrappedResolver = guard.middleware(mockResolver); + + // Should throw "Access denied" because the meta-envelope won't be found with eName2 + await expect( + wrappedResolver(null, { id: metaEnvelope.metaEnvelope.id }, context) + ).rejects.toThrow("Access denied"); + }); + }); +}); + diff --git a/infrastructure/evault-core/src/protocol/vault-access-guard.ts b/infrastructure/evault-core/src/core/protocol/vault-access-guard.ts similarity index 95% rename from infrastructure/evault-core/src/protocol/vault-access-guard.ts rename to infrastructure/evault-core/src/core/protocol/vault-access-guard.ts index ede3f2c8..0ef39b7f 100644 --- a/infrastructure/evault-core/src/protocol/vault-access-guard.ts +++ b/infrastructure/evault-core/src/core/protocol/vault-access-guard.ts @@ -7,6 +7,7 @@ import axios from "axios"; export type VaultContext = YogaInitialContext & { currentUser: string | null; tokenPayload?: any; + eName: string | null; }; export class VaultAccessGuard { @@ -71,16 +72,22 @@ export class VaultAccessGuard { return true; } + // Validate eName is present + if (!context.eName) { + throw new Error("X-ENAME header is required for access control"); + } + // Fallback to original ACL logic if no valid token if (!context.currentUser) { const metaEnvelope = await this.db.findMetaEnvelopeById( - metaEnvelopeId + metaEnvelopeId, + context.eName ); if (metaEnvelope && metaEnvelope.acl.includes("*")) return true; return false; } - const metaEnvelope = await this.db.findMetaEnvelopeById(metaEnvelopeId); + const metaEnvelope = await this.db.findMetaEnvelopeById(metaEnvelopeId, context.eName); if (!metaEnvelope) { return false; } diff --git a/infrastructure/evault-core/src/core/provisioning/config/database.ts b/infrastructure/evault-core/src/core/provisioning/config/database.ts new file mode 100644 index 00000000..5137acbf --- /dev/null +++ b/infrastructure/evault-core/src/core/provisioning/config/database.ts @@ -0,0 +1,19 @@ +import { DataSource } from "typeorm"; +import { Verification } from "../entities/Verification"; +import * as dotenv from "dotenv"; +import { join } from "path"; + +// Load environment variables from root .env file +dotenv.config({ path: join(__dirname, "../../../../.env") }); + +export const ProvisioningDataSource = new DataSource({ + type: "postgres", + url: process.env.REGISTRY_DATABASE_URL || process.env.PROVISIONER_DATABASE_URL || 
"postgresql://postgres:postgres@localhost:5432/registry", + logging: process.env.DB_LOGGING === "true", + entities: [Verification], + synchronize: false, + migrations: [], + migrationsTableName: "migrations", + subscribers: [], +}); + diff --git a/infrastructure/evault-provisioner/src/entities/Verification.ts b/infrastructure/evault-core/src/core/provisioning/entities/Verification.ts similarity index 99% rename from infrastructure/evault-provisioner/src/entities/Verification.ts rename to infrastructure/evault-core/src/core/provisioning/entities/Verification.ts index 05af9474..021b4c12 100644 --- a/infrastructure/evault-provisioner/src/entities/Verification.ts +++ b/infrastructure/evault-core/src/core/provisioning/entities/Verification.ts @@ -50,3 +50,4 @@ export class Verification { @UpdateDateColumn() updatedAt!: Date; } + diff --git a/infrastructure/evault-core/src/core/provisioning/services/ProvisioningService.ts b/infrastructure/evault-core/src/core/provisioning/services/ProvisioningService.ts new file mode 100644 index 00000000..fca6fb7c --- /dev/null +++ b/infrastructure/evault-core/src/core/provisioning/services/ProvisioningService.ts @@ -0,0 +1,124 @@ +import axios, { AxiosError } from "axios"; +import { W3IDBuilder } from "w3id"; +import * as jose from "jose"; +import { VerificationService } from "../../../services/VerificationService"; + +export interface ProvisionRequest { + registryEntropy: string; + namespace: string; + verificationId: string; + publicKey: string; +} + +export interface ProvisionResponse { + success: boolean; + uri?: string; + w3id?: string; + message?: string; + error?: string | unknown; +} + +export class ProvisioningService { + constructor(private verificationService: VerificationService) {} + + /** + * Provisions a new eVault logically (no infrastructure creation) + * @param request - Provision request containing registryEntropy, namespace, verificationId, and publicKey + * @returns Provision response with w3id (eName) and URI + */ + async provisionEVault(request: ProvisionRequest): Promise { + try { + if (!process.env.PUBLIC_REGISTRY_URL) { + throw new Error("PUBLIC_REGISTRY_URL is not set"); + } + + const { registryEntropy, namespace, verificationId, publicKey } = request; + + if (!registryEntropy || !namespace || !verificationId || !publicKey) { + return { + success: false, + error: "Missing required fields", + message: "Missing required fields: registryEntropy, namespace, verificationId, publicKey", + }; + } + + // Verify the registry entropy token + const jwksResponse = await axios.get( + new URL( + `/.well-known/jwks.json`, + process.env.PUBLIC_REGISTRY_URL + ).toString() + ); + + const JWKS = jose.createLocalJWKSet(jwksResponse.data); + const { payload } = await jose.jwtVerify(registryEntropy, JWKS); + + // Generate eName (W3ID) from entropy + const userId = await new W3IDBuilder() + .withNamespace(namespace) + .withEntropy(payload.entropy as string) + .withGlobal(true) + .build(); + + const w3id = userId.id; + + // Validate verification if not demo code + const demoCode = process.env.DEMO_CODE_W3DS || "d66b7138-538a-465f-a6ce-f6985854c3f4"; + if (verificationId !== demoCode) { + const verification = await this.verificationService.findById(verificationId); + if (!verification) { + throw new Error("verification doesn't exist"); + } + if (!verification.approved) { + throw new Error("verification not approved"); + } + if (verification.consumed) { + throw new Error("This verification ID has already been used"); + } + } + + // Update verification with 
linked eName + await this.verificationService.findByIdAndUpdate(verificationId, { linkedEName: w3id }); + + // Generate evault ID + const evaultId = await new W3IDBuilder().withGlobal(true).build(); + + // Build URI (IP:PORT format pointing to shared evault-core service) + const baseUri = process.env.EVAULT_BASE_URI || `http://${process.env.EVAULT_HOST || "localhost"}:${process.env.PORT || 4000}`; + const uri = baseUri; + + // Register in registry + await axios.post( + new URL( + "/register", + process.env.PUBLIC_REGISTRY_URL + ).toString(), + { + ename: w3id, + uri, + evault: evaultId.id, + }, + { + headers: { + Authorization: `Bearer ${process.env.REGISTRY_SHARED_SECRET}`, + }, + } + ); + + return { + success: true, + w3id, + uri, + }; + } catch (error) { + const axiosError = error as AxiosError; + console.error("Provisioning error:", error); + return { + success: false, + error: axiosError.response?.data || axiosError.message, + message: "Failed to provision evault instance", + }; + } + } +} + diff --git a/infrastructure/evault-provisioner/src/services/VerificationService.ts b/infrastructure/evault-core/src/core/provisioning/services/VerificationService.ts similarity index 97% rename from infrastructure/evault-provisioner/src/services/VerificationService.ts rename to infrastructure/evault-core/src/core/provisioning/services/VerificationService.ts index ced2185f..05646d4f 100644 --- a/infrastructure/evault-provisioner/src/services/VerificationService.ts +++ b/infrastructure/evault-core/src/core/provisioning/services/VerificationService.ts @@ -20,6 +20,7 @@ export class VerificationService { data: DeepPartial, ): Promise { const current = await this.findById(id); + if (!current) return null; const toSave = this.verificationRepository.create({ ...current, ...data, @@ -48,3 +49,4 @@ export class VerificationService { }); } } + diff --git a/infrastructure/evault-core/src/types/w3id.ts b/infrastructure/evault-core/src/core/types/w3id.ts similarity index 100% rename from infrastructure/evault-core/src/types/w3id.ts rename to infrastructure/evault-core/src/core/types/w3id.ts diff --git a/infrastructure/evault-core/src/utils/codec.ts b/infrastructure/evault-core/src/core/utils/codec.ts similarity index 100% rename from infrastructure/evault-core/src/utils/codec.ts rename to infrastructure/evault-core/src/core/utils/codec.ts diff --git a/infrastructure/evault-core/src/utils/signer.ts b/infrastructure/evault-core/src/core/utils/signer.ts similarity index 100% rename from infrastructure/evault-core/src/utils/signer.ts rename to infrastructure/evault-core/src/core/utils/signer.ts diff --git a/infrastructure/evault-core/src/w3id/log-service.ts b/infrastructure/evault-core/src/core/w3id/log-service.ts similarity index 100% rename from infrastructure/evault-core/src/w3id/log-service.ts rename to infrastructure/evault-core/src/core/w3id/log-service.ts diff --git a/infrastructure/evault-core/src/w3id/log-storage.ts b/infrastructure/evault-core/src/core/w3id/log-storage.ts similarity index 100% rename from infrastructure/evault-core/src/w3id/log-storage.ts rename to infrastructure/evault-core/src/core/w3id/log-storage.ts diff --git a/infrastructure/evault-core/src/w3id/w3id.ts b/infrastructure/evault-core/src/core/w3id/w3id.ts similarity index 100% rename from infrastructure/evault-core/src/w3id/w3id.ts rename to infrastructure/evault-core/src/core/w3id/w3id.ts diff --git a/infrastructure/evault-core/src/db/db.service.spec.ts b/infrastructure/evault-core/src/db/db.service.spec.ts deleted file mode 100644 
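Because provisioning is now purely logical, a platform can create an eVault with one HTTP call to the `/provision` route registered above. A sketch of the request, assuming the Fastify server's default port; every body value below is a placeholder:

```typescript
import axios from "axios";

interface ProvisionResponse {
    success: boolean;
    w3id?: string;
    uri?: string;
    message?: string;
    error?: unknown;
}

async function provisionEVault(): Promise<ProvisionResponse> {
    const { data } = await axios.post<ProvisionResponse>(
        "http://localhost:4000/provision",
        {
            registryEntropy: "<jwt issued by the registry>",
            namespace: "example-namespace",
            verificationId: "<approved verification id>",
            publicKey: "<device public key>",
        },
    );
    // On success the response carries the new eName (w3id) and the shared eVault URI.
    return data;
}
```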
index ab09324b..00000000 --- a/infrastructure/evault-core/src/db/db.service.spec.ts +++ /dev/null @@ -1,276 +0,0 @@ -import neo4j, { Driver } from "neo4j-driver"; -import { DbService } from "./db.service"; // adjust if needed -import { it, describe, beforeAll, afterAll, expect } from "vitest"; -import { Neo4jContainer, StartedNeo4jContainer } from "@testcontainers/neo4j"; - -type Envelope = { - id: string; - ontology: string; - value: any; - valueType: string; -}; - -describe("DbService (integration)", () => { - let container: StartedNeo4jContainer; - let service: DbService; - let driver: Driver; - - beforeAll(async () => { - container = await new Neo4jContainer("neo4j:5.15").start(); - - const username = container.getUsername(); - const password = container.getPassword(); - const boltPort = container.getMappedPort(7687); - const uri = `bolt://localhost:${boltPort}`; - - driver = neo4j.driver(uri, neo4j.auth.basic(username, password)); - service = new DbService(driver); - }); - - afterAll(async () => { - await service.close(); - await driver.close(); - await container.stop(); - }); - - it("should store and retrieve a meta-envelope with various data types", async () => { - const input = { - ontology: "TestTypes", - payload: { - string: "hello world", - number: 42, - boolean: true, - date: new Date("2025-04-10T00:00:00Z"), - array: [1, 2, 3], - object: { nested: { value: "deep" } }, - }, - acl: ["@test-user"], - }; - - const result = await service.storeMetaEnvelope(input, input.acl); - const id = result.metaEnvelope.id; - - const fetched = await service.findMetaEnvelopeById(id); - expect(fetched).toBeDefined(); - if (!fetched) return; - expect(fetched.id).toBeDefined(); - expect(fetched.ontology).toBe("TestTypes"); - expect(fetched.acl).toEqual(["@test-user"]); - expect(fetched.envelopes).toHaveLength(6); - - // Verify parsed field matches original payload - expect(fetched.parsed).toEqual(input.payload); - - // Verify each data type is properly stored and retrieved - const envelopes = fetched.envelopes.reduce( - (acc: Record, e: Envelope) => { - acc[e.ontology] = e; - return acc; - }, - {}, - ); - - expect(envelopes.string.value).toBe("hello world"); - expect(envelopes.string.valueType).toBe("string"); - - expect(envelopes.number.value).toBe(42); - expect(envelopes.number.valueType).toBe("number"); - - expect(envelopes.boolean.value).toBe(true); - expect(envelopes.boolean.valueType).toBe("boolean"); - - expect(envelopes.date.value).toBeInstanceOf(Date); - expect(envelopes.date.value.toISOString()).toBe( - "2025-04-10T00:00:00.000Z", - ); - expect(envelopes.date.valueType).toBe("date"); - - expect(envelopes.array.value).toEqual([1, 2, 3]); - expect(envelopes.array.valueType).toBe("array"); - - expect(envelopes.object.value).toEqual({ nested: { value: "deep" } }); - expect(envelopes.object.valueType).toBe("object"); - }); - - it("should find meta-envelopes containing the search term in any envelope value", async () => { - const input = { - ontology: "SocialMediaPost", - payload: { - text: "This is a searchable tweet", - image: "https://example.com/image.jpg", - likes: ["user1", "user2"], - }, - acl: ["@search-test-user"], - }; - - const metaEnv = await service.storeMetaEnvelope(input, input.acl); - - const found = await service.findMetaEnvelopesBySearchTerm( - "SocialMediaPost", - "searchable", - ); - - expect(Array.isArray(found)).toBe(true); - const match = found.find((m) => m.id === metaEnv.metaEnvelope.id); - expect(match).toBeDefined(); - if (!match) throw new Error(); - 
expect(match.envelopes.length).toBeGreaterThan(0); - expect( - match.envelopes.some((e) => e.value.includes("searchable")), - ).toBe(true); - }); - - it("should return empty array if no values contain the search term", async () => { - const found = await service.findMetaEnvelopesBySearchTerm( - "SocialMediaPost", - "notfoundterm", - ); - expect(Array.isArray(found)).toBe(true); - expect(found.length).toBe(0); - }); - - it("should find meta-envelopes by ontology", async () => { - const results = - await service.findMetaEnvelopesByOntology("SocialMediaPost"); - expect(Array.isArray(results)).toBe(true); - expect(results.length).toBeGreaterThan(0); - }); - - it("should delete a meta-envelope and its envelopes", async () => { - const meta = { - ontology: "TempPost", - payload: { - value: "to be deleted", - }, - acl: ["@delete-user"], - }; - - const stored = await service.storeMetaEnvelope(meta, meta.acl); - await service.deleteMetaEnvelope(stored.metaEnvelope.id); - - const deleted = await service.findMetaEnvelopeById( - stored.metaEnvelope.id, - ); - expect(deleted).toBeNull(); - }); - - it("should update envelope value with proper type handling", async () => { - const meta = { - ontology: "UpdateTest", - payload: { - value: "original", - }, - acl: ["@updater"], - }; - - const stored = await service.storeMetaEnvelope(meta, meta.acl); - - const result = await service.findMetaEnvelopeById( - stored.metaEnvelope.id, - ); - if (!result) return; - const targetEnvelope = result.envelopes.find( - (e: Envelope) => e.ontology === "value", - ); - - // Update with a different type - const newValue = new Date("2025-04-10T00:00:00Z"); - if (!targetEnvelope) return; - await service.updateEnvelopeValue(targetEnvelope.id, newValue); - - const updated = await service.findMetaEnvelopeById( - stored.metaEnvelope.id, - ); - if (!updated) return; - const updatedValue = updated.envelopes.find( - (e: Envelope) => e.id === targetEnvelope.id, - ); - - if (!updatedValue) return; - expect(updatedValue.value).toBeInstanceOf(Date); - expect(updatedValue.value.toISOString()).toBe( - "2025-04-10T00:00:00.000Z", - ); - expect(updatedValue.valueType).toBe("date"); - }); - - it("should find meta-envelopes containing the search term in any value type", async () => { - const input = { - ontology: "SearchTest", - payload: { - string: "This is a searchable string", - array: ["searchable", "array", "element"], - object: { text: "searchable object" }, - number: 42, - date: new Date("2025-04-10T00:00:00Z"), - }, - acl: ["@search-test-user"], - }; - - const metaEnv = await service.storeMetaEnvelope(input, input.acl); - - // Test search in string - const foundInString = await service.findMetaEnvelopesBySearchTerm( - "SearchTest", - "searchable string", - ); - expect(foundInString.length).toBeGreaterThan(0); - expect(foundInString[0].id).toBe(metaEnv.metaEnvelope.id); - - // Test search in array - const foundInArray = await service.findMetaEnvelopesBySearchTerm( - "SearchTest", - "searchable", - ); - expect(foundInArray.length).toBeGreaterThan(0); - expect(foundInArray[0].id).toBe(metaEnv.metaEnvelope.id); - - // Test search in object - const foundInObject = await service.findMetaEnvelopesBySearchTerm( - "SearchTest", - "searchable object", - ); - expect(foundInObject.length).toBeGreaterThan(0); - expect(foundInObject[0].id).toBe(metaEnv.metaEnvelope.id); - }); - - it("should find meta-envelopes containing the search term with parsed payload", async () => { - const input = { - ontology: "SearchTestHeyyy", - payload: { - string: "This 
is a searchable string", - array: ["searchable", "array", "element"], - object: { text: "searchable object" }, - number: 42, - date: new Date("2025-04-10T00:00:00Z"), - }, - acl: ["@search-test-user"], - }; - - const metaEnv = await service.storeMetaEnvelope(input, input.acl); - - // Test search in string - const foundInString = await service.findMetaEnvelopesBySearchTerm( - "SearchTestHeyyy", - "searchable string", - ); - expect(foundInString.length).toBeGreaterThan(0); - expect(foundInString[0].id).toBe(metaEnv.metaEnvelope.id); - - // Test search in array - const foundInArray = await service.findMetaEnvelopesBySearchTerm( - "SearchTestHeyyy", - "searchable", - ); - expect(foundInArray.length).toBeGreaterThan(0); - expect(foundInArray[0].id).toBe(metaEnv.metaEnvelope.id); - - // Test search in object - const foundInObject = await service.findMetaEnvelopesBySearchTerm( - "SearchTestHeyyy", - "searchable object", - ); - expect(foundInObject.length).toBeGreaterThan(0); - expect(foundInObject[0].id).toBe(metaEnv.metaEnvelope.id); - }); -}); diff --git a/infrastructure/evault-provisioner/src/entities/Notification.ts b/infrastructure/evault-core/src/entities/Notification.ts similarity index 68% rename from infrastructure/evault-provisioner/src/entities/Notification.ts rename to infrastructure/evault-core/src/entities/Notification.ts index 8b9ce998..500ecdd7 100644 --- a/infrastructure/evault-provisioner/src/entities/Notification.ts +++ b/infrastructure/evault-core/src/entities/Notification.ts @@ -5,22 +5,22 @@ export class Notification { @PrimaryGeneratedColumn("uuid") id!: string; - @Column() + @Column({ type: "varchar" }) eName!: string; - @Column() + @Column({ type: "varchar" }) title!: string; - @Column() + @Column({ type: "text" }) body!: string; @Column({ type: "jsonb", nullable: true }) data?: Record; - @Column({ default: false }) + @Column({ type: "boolean", default: false }) delivered!: boolean; - @Column({ nullable: true }) + @Column({ type: "timestamp", nullable: true }) deliveredAt!: Date; @CreateDateColumn() diff --git a/infrastructure/evault-core/src/entities/Verification.ts b/infrastructure/evault-core/src/entities/Verification.ts new file mode 100644 index 00000000..882f76aa --- /dev/null +++ b/infrastructure/evault-core/src/entities/Verification.ts @@ -0,0 +1,52 @@ +import { + Entity, + PrimaryGeneratedColumn, + Column, + CreateDateColumn, + UpdateDateColumn, +} from "typeorm"; + +@Entity() +export class Verification { + @PrimaryGeneratedColumn("uuid") + id!: string; + + @Column({ type: "varchar", nullable: true }) + veriffId!: string; + + @Column({ type: "boolean", nullable: true }) + approved!: boolean; + + @Column({ type: "jsonb", nullable: true }) + data!: Record; + + @Column({ type: "varchar", nullable: true }) + referenceId!: string; + + @Column({ type: "varchar", nullable: true }) + documentId!: string; + + @Column({ type: "boolean", default: false }) + consumed!: boolean; + + @Column({ type: "varchar", nullable: true }) + linkedEName!: string; + + @Column({ type: "varchar", nullable: true }) + deviceId!: string; + + @Column({ type: "varchar", nullable: true }) + platform!: string; + + @Column({ type: "varchar", nullable: true }) + fcmToken!: string; + + @Column({ type: "boolean", default: true }) + deviceActive!: boolean; + + @CreateDateColumn() + createdAt!: Date; + + @UpdateDateColumn() + updatedAt!: Date; +} diff --git a/infrastructure/evault-core/src/evault.ts b/infrastructure/evault-core/src/evault.ts deleted file mode 100644 index 13d33fb3..00000000 --- 
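The Verification entity above is plain TypeORM, so working with it outside the provided services follows the usual repository pattern. A sketch under the assumption of a DataSource configured like the provisioning one earlier in this diff (the DataSource here is hypothetical; the real AppDataSource config is not part of this change):

```typescript
import { DataSource } from "typeorm";
import { Verification } from "./entities/Verification";

// Hypothetical DataSource for illustration only.
const dataSource = new DataSource({
    type: "postgres",
    url: process.env.REGISTRY_DATABASE_URL,
    entities: [Verification],
    synchronize: false,
});

async function approveVerification(id: string): Promise<Verification | null> {
    if (!dataSource.isInitialized) await dataSource.initialize();
    const repo = dataSource.getRepository(Verification);
    const verification = await repo.findOne({ where: { id } });
    if (!verification) return null;
    // Mark the verification as approved so provisioning will accept it.
    verification.approved = true;
    return repo.save(verification);
}
```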
a/infrastructure/evault-core/src/evault.ts +++ /dev/null @@ -1,121 +0,0 @@ -import { DbService } from "./db/db.service"; -import { LogService } from "./w3id/log-service"; -import { GraphQLServer } from "./protocol/graphql-server"; -import { registerHttpRoutes } from "./http/server"; -import fastify, { - FastifyInstance, - FastifyRequest, - FastifyReply, -} from "fastify"; -import { renderVoyagerPage } from "graphql-voyager/middleware"; -import { createYoga } from "graphql-yoga"; -import dotenv from "dotenv"; -import path from "path"; -import neo4j, { Driver } from "neo4j-driver"; -import { W3ID } from "./w3id/w3id"; -import { connectWithRetry } from "./db/retry-neo4j"; - -dotenv.config({ path: path.resolve(__dirname, "../../../.env") }); - -class EVault { - server: FastifyInstance; - graphqlServer: GraphQLServer; - logService: LogService; - driver: Driver; - publicKey: string | null; - w3id: string | null; - - private constructor(driver: Driver) { - this.driver = driver; - this.publicKey = process.env.EVAULT_PUBLIC_KEY || null; - this.w3id = process.env.W3ID || null; - const dbService = new DbService(driver); - this.logService = new LogService(driver); - this.graphqlServer = new GraphQLServer(dbService, this.publicKey, this.w3id, this); - this.server = fastify({ - logger: true, - }); - } - - static async create(): Promise { - const uri = process.env.NEO4J_URI || "bolt://localhost:7687"; - const user = process.env.NEO4J_USER || "neo4j"; - const password = process.env.NEO4J_PASSWORD || "neo4j"; - - if ( - !process.env.NEO4J_URI || - !process.env.NEO4J_USER || - !process.env.NEO4J_PASSWORD - ) { - console.warn( - "Using default Neo4j connection parameters. Set NEO4J_URI, NEO4J_USER, and NEO4J_PASSWORD environment variables for custom configuration.", - ); - } - - if (!process.env.W3ID) { - console.warn( - "W3ID environment variable not set. The eVault will not have an associated W3ID identifier.", - ); - } - - if (!process.env.EVAULT_PUBLIC_KEY) { - console.warn( - "EVAULT_PUBLIC_KEY environment variable not set. 
The eVault will not have an associated public key for cryptographic operations.", - ); - } - - const driver = await connectWithRetry(uri, user, password); - return new EVault(driver); - } - - async initialize() { - await registerHttpRoutes(this.server, this); - - // No longer automatically create W3ID - just use the provided W3ID and public key - // The private key is now managed on the user's phone - - const yoga = this.graphqlServer.init(); - - this.server.route({ - // Bind to the Yoga's endpoint to avoid rendering on any path - url: yoga.graphqlEndpoint, - method: ["GET", "POST", "OPTIONS"], - handler: (req, reply) => - yoga.handleNodeRequestAndResponse(req, reply), - }); - - // Mount Voyager endpoint - this.server.get( - "/voyager", - (req: FastifyRequest, reply: FastifyReply) => { - reply.type("text/html").send( - renderVoyagerPage({ - endpointUrl: "/graphql", - }), - ); - }, - ); - } - - async start() { - await this.initialize(); - - const port = process.env.NOMAD_PORT_http || process.env.PORT || 4000; - - await this.server.listen({ port: Number(port), host: "0.0.0.0" }); - console.log(`Server started on http://0.0.0.0:${port}`); - console.log( - `GraphQL endpoint available at http://0.0.0.0:${port}/graphql`, - ); - console.log( - `GraphQL Voyager available at http://0.0.0.0:${port}/voyager`, - ); - console.log( - `API Documentation available at http://0.0.0.0:${port}/docs`, - ); - } -} - -EVault.create() - .then(evault => evault.start()) - .catch(console.error); diff --git a/infrastructure/evault-core/src/index.ts b/infrastructure/evault-core/src/index.ts new file mode 100644 index 00000000..d123e774 --- /dev/null +++ b/infrastructure/evault-core/src/index.ts @@ -0,0 +1,180 @@ +import "reflect-metadata"; +import express, { Request, Response } from "express"; +import dotenv from "dotenv"; +import path from "path"; +import { createHmacSignature } from "./utils/hmac"; +import cors from "cors"; +import { AppDataSource } from "./config/database"; +import { VerificationService } from "./services/VerificationService"; +import { VerificationController } from "./controllers/VerificationController"; +import { NotificationController } from "./controllers/NotificationController"; +import { ProvisioningController } from "./controllers/ProvisioningController"; +import { ProvisioningService } from "./services/ProvisioningService"; + +// Import evault-core functionality +import { DbService } from "./core/db/db.service"; +import { LogService } from "./core/w3id/log-service"; +import { GraphQLServer } from "./core/protocol/graphql-server"; +import { registerHttpRoutes } from "./core/http/server"; +import fastify, { + FastifyInstance, + FastifyRequest, + FastifyReply, +} from "fastify"; +import { renderVoyagerPage } from "graphql-voyager/middleware"; +import { connectWithRetry } from "./core/db/retry-neo4j"; +import neo4j, { Driver } from "neo4j-driver"; + +dotenv.config({ path: path.resolve(__dirname, "../../../.env") }); + +const expressApp = express(); +const expressPort = process.env.EXPRESS_PORT || process.env.PORT || 3001; +const fastifyPort = process.env.FASTIFY_PORT || process.env.PORT || 4000; + +// Configure CORS for SSE +expressApp.use( + cors({ + origin: "*", + methods: ["GET", "POST", "OPTIONS", "PATCH"], + allowedHeaders: ["Content-Type", "Authorization", "X-ENAME"], + credentials: true, + }) +); + +// Increase JSON payload limit to 50MB +expressApp.use(express.json({ limit: "50mb" })); +expressApp.use(express.urlencoded({ limit: "50mb", extended: true })); + +// Initialize database 
connection +const initializeDatabase = async () => { + try { + await AppDataSource.initialize(); + console.log("PostgreSQL database connection initialized"); + } catch (error) { + console.error("Error during database initialization:", error); + process.exit(1); + } +}; + +// Initialize services and controllers +let verificationService: VerificationService; +let verificationController: VerificationController; +let notificationController: NotificationController; +let provisioningController: ProvisioningController; + +// eVault Core initialization +let fastifyServer: FastifyInstance; +let graphqlServer: GraphQLServer; +let logService: LogService; +let driver: Driver; +let provisioningService: ProvisioningService | undefined; + + +// Initialize eVault Core +const initializeEVault = async (provisioningServiceInstance?: ProvisioningService) => { + const uri = process.env.NEO4J_URI || "bolt://localhost:7687"; + const user = process.env.NEO4J_USER || "neo4j"; + const password = process.env.NEO4J_PASSWORD || "neo4j"; + + if (!process.env.NEO4J_URI || !process.env.NEO4J_USER || !process.env.NEO4J_PASSWORD) { + console.warn( + "Using default Neo4j connection parameters. Set NEO4J_URI, NEO4J_USER, and NEO4J_PASSWORD environment variables for custom configuration." + ); + } + + driver = await connectWithRetry(uri, user, password); + + // Create eName index for multi-tenant performance + try { + const { createENameIndex } = await import("./core/db/migrations/add-ename-index"); + await createENameIndex(driver); + } catch (error) { + console.warn("Failed to create eName index:", error); + } + + const dbService = new DbService(driver); + logService = new LogService(driver); + const publicKey = process.env.EVAULT_PUBLIC_KEY || null; + const w3id = process.env.W3ID || null; + + const evaultInstance = { + publicKey, + w3id, + }; + + graphqlServer = new GraphQLServer(dbService, publicKey, w3id, evaultInstance); + + fastifyServer = fastify({ + logger: true, + }); + + // Register HTTP routes with provisioning service if available + await registerHttpRoutes(fastifyServer, evaultInstance, provisioningServiceInstance); + + // Setup GraphQL + const yoga = graphqlServer.init(); + + fastifyServer.route({ + url: yoga.graphqlEndpoint, + method: ["GET", "POST", "OPTIONS"], + handler: (req, reply) => yoga.handleNodeRequestAndResponse(req, reply), + }); + + // Mount Voyager endpoint + fastifyServer.get("/voyager", (req: FastifyRequest, reply: FastifyReply) => { + reply.type("text/html").send( + renderVoyagerPage({ + endpointUrl: "/graphql", + }) + ); + }); + + // Start Fastify server + await fastifyServer.listen({ port: Number(fastifyPort), host: "0.0.0.0" }); + console.log(`Fastify server (GraphQL/HTTP) started on http://0.0.0.0:${fastifyPort}`); + console.log(`GraphQL endpoint available at http://0.0.0.0:${fastifyPort}/graphql`); + console.log(`GraphQL Voyager available at http://0.0.0.0:${fastifyPort}/voyager`); + console.log(`API Documentation available at http://0.0.0.0:${fastifyPort}/docs`); +}; + +// Health check endpoint +expressApp.get("/health", (req: Request, res: Response) => { + res.json({ status: "ok" }); +}); + +// Start the server +const start = async () => { + try { + await initializeDatabase(); + + // Initialize services + const { Verification } = await import("./entities/Verification"); + verificationService = new VerificationService( + AppDataSource.getRepository(Verification) + ); + verificationController = new VerificationController(verificationService); + notificationController = new 
NotificationController(); + + // Initialize provisioning service (uses shared AppDataSource) + provisioningService = new ProvisioningService(verificationService); + provisioningController = new ProvisioningController(provisioningService); + + // Register verification, notification, and provisioning routes + verificationController.registerRoutes(expressApp); + notificationController.registerRoutes(expressApp); + provisioningController.registerRoutes(expressApp); + + // Start eVault Core (Fastify + GraphQL) with provisioning service first + await initializeEVault(provisioningService); + + // Start Express server for provisioning (after Fastify is ready) + expressApp.listen(expressPort, () => { + console.log(`Express server (Provisioning API) running on port ${expressPort}`); + }); + } catch (err) { + console.error(err); + process.exit(1); + } +}; + +start(); diff --git a/infrastructure/evault-provisioner/src/migrations/1748932757644-migration.ts b/infrastructure/evault-core/src/migrations/1748932757644-migration.ts similarity index 100% rename from infrastructure/evault-provisioner/src/migrations/1748932757644-migration.ts rename to infrastructure/evault-core/src/migrations/1748932757644-migration.ts diff --git a/infrastructure/evault-provisioner/src/migrations/1748966722767-migration.ts b/infrastructure/evault-core/src/migrations/1748966722767-migration.ts similarity index 100% rename from infrastructure/evault-provisioner/src/migrations/1748966722767-migration.ts rename to infrastructure/evault-core/src/migrations/1748966722767-migration.ts diff --git a/infrastructure/evault-provisioner/src/migrations/1748968097591-migration.ts b/infrastructure/evault-core/src/migrations/1748968097591-migration.ts similarity index 100% rename from infrastructure/evault-provisioner/src/migrations/1748968097591-migration.ts rename to infrastructure/evault-core/src/migrations/1748968097591-migration.ts diff --git a/infrastructure/evault-provisioner/src/migrations/1758389959600-migration.ts b/infrastructure/evault-core/src/migrations/1758389959600-migration.ts similarity index 100% rename from infrastructure/evault-provisioner/src/migrations/1758389959600-migration.ts rename to infrastructure/evault-core/src/migrations/1758389959600-migration.ts diff --git a/infrastructure/evault-provisioner/src/migrations/1759924736417-migration.ts b/infrastructure/evault-core/src/migrations/1759924736417-migration.ts similarity index 100% rename from infrastructure/evault-provisioner/src/migrations/1759924736417-migration.ts rename to infrastructure/evault-core/src/migrations/1759924736417-migration.ts diff --git a/infrastructure/evault-provisioner/src/migrations/1759926271076-migration.ts b/infrastructure/evault-core/src/migrations/1759926271076-migration.ts similarity index 100% rename from infrastructure/evault-provisioner/src/migrations/1759926271076-migration.ts rename to infrastructure/evault-core/src/migrations/1759926271076-migration.ts diff --git a/infrastructure/evault-core/src/secrets/secrets-store.ts b/infrastructure/evault-core/src/secrets/secrets-store.ts deleted file mode 100644 index 62ba8661..00000000 --- a/infrastructure/evault-core/src/secrets/secrets-store.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { - createCipheriv, - createDecipheriv, - randomBytes, - pbkdf2Sync, -} from "crypto"; -import fs from "fs/promises"; -import path from "path"; -import { hexToUint8Array, uint8ArrayToHex } from "../utils/codec"; - -const ALGORITHM = "aes-256-gcm"; -const IV_LENGTH = 16; -const SALT_LENGTH = 32; -const 
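Once start() has run, the process exposes two listeners: Express for the REST/provisioning API and Fastify for GraphQL. A small smoke-test sketch using the default ports from index.ts (the eName value is a placeholder):

```typescript
// Quick check that both halves of the combined service are up.
async function smokeTest() {
    // Express side (provisioning/REST API).
    const health = await fetch("http://localhost:3001/health");
    console.log(await health.json()); // { status: "ok" }

    // Fastify side (GraphQL); X-ENAME is required by the tenant-scoped resolvers.
    const graphql = await fetch("http://localhost:4000/graphql", {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            "X-ENAME": "tenant1@example.com",
        },
        body: JSON.stringify({ query: "{ __typename }" }),
    });
    console.log(graphql.status); // 200 once the server is listening
}

smokeTest().catch(console.error);
```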
TAG_LENGTH = 16; -const ITERATIONS = 100000; -const KEY_LENGTH = 32; - -interface StoredSeed { - encrypted: string; - iv: string; - salt: string; - nextKeyHash: string; -} - -export class SecretsStore { - private storePath: string; - private password: string; - - constructor(storePath: string, password: string) { - this.storePath = storePath; - this.password = password; - } - - private deriveKey(salt: Buffer): Buffer { - return pbkdf2Sync(this.password, salt, ITERATIONS, KEY_LENGTH, "sha256"); - } - - private async ensureStoreExists(): Promise { - try { - await fs.access(this.storePath); - } catch { - await fs.mkdir(path.dirname(this.storePath), { recursive: true }); - await fs.writeFile(this.storePath, JSON.stringify({})); - } - } - - private async readStore(): Promise> { - await this.ensureStoreExists(); - const content = await fs.readFile(this.storePath, "utf-8"); - return JSON.parse(content); - } - - private async writeStore(store: Record): Promise { - await fs.writeFile(this.storePath, JSON.stringify(store, null, 2)); - } - - private encrypt(data: Buffer): { - encrypted: string; - iv: string; - salt: string; - } { - const iv = randomBytes(IV_LENGTH); - const salt = randomBytes(SALT_LENGTH); - const key = this.deriveKey(salt); - const cipher = createCipheriv(ALGORITHM, key, iv); - const encrypted = Buffer.concat([ - cipher.update(data), - cipher.final(), - cipher.getAuthTag(), - ]); - return { - encrypted: uint8ArrayToHex(encrypted), - iv: uint8ArrayToHex(iv), - salt: uint8ArrayToHex(salt), - }; - } - - private decrypt(encrypted: string, iv: string, salt: string): Buffer { - const key = this.deriveKey(Buffer.from(hexToUint8Array(salt))); - const decipher = createDecipheriv( - ALGORITHM, - key, - Buffer.from(hexToUint8Array(iv)) - ); - const encryptedBuffer = Buffer.from(hexToUint8Array(encrypted)); - const tag = encryptedBuffer.slice(-TAG_LENGTH); - const data = encryptedBuffer.slice(0, -TAG_LENGTH); - decipher.setAuthTag(tag); - return Buffer.concat([decipher.update(data), decipher.final()]); - } - - public async storeSeed( - keyId: string, - seed: Uint8Array, - nextKeyHash: string - ): Promise { - const store = await this.readStore(); - const { encrypted, iv, salt } = this.encrypt(Buffer.from(seed)); - const storedSeed: StoredSeed = { encrypted, iv, salt, nextKeyHash }; - store[keyId] = JSON.stringify(storedSeed); - await this.writeStore(store); - } - - public async getSeed( - keyId: string - ): Promise<{ seed: Uint8Array; nextKeyHash: string }> { - const store = await this.readStore(); - const data: StoredSeed = JSON.parse(store[keyId]); - if (!data) throw new Error(`No seed found for key ${keyId}`); - return { - seed: this.decrypt(data.encrypted, data.iv, data.salt), - nextKeyHash: data.nextKeyHash, - }; - } - - public async deleteSeed(keyId: string): Promise { - const store = await this.readStore(); - delete store[keyId]; - await this.writeStore(store); - } - - public async listSeeds(): Promise { - const store = await this.readStore(); - return Object.keys(store); - } -} diff --git a/infrastructure/evault-core/src/services/NotificationService.spec.ts b/infrastructure/evault-core/src/services/NotificationService.spec.ts new file mode 100644 index 00000000..50d8b3a7 --- /dev/null +++ b/infrastructure/evault-core/src/services/NotificationService.spec.ts @@ -0,0 +1,302 @@ +import "reflect-metadata"; +import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest"; +import { NotificationService } from "./NotificationService"; +import { Verification } from 
"../entities/Verification"; +import { Notification } from "../entities/Notification"; +import { setupTestDatabase, teardownTestDatabase } from "../test-utils/postgres-setup"; +import { DataSource } from "typeorm"; +import { Repository } from "typeorm"; + +describe("NotificationService", () => { + let dataSource: DataSource; + let notificationService: NotificationService; + let verificationRepository: Repository; + let notificationRepository: Repository; + + beforeAll(async () => { + const setup = await setupTestDatabase(); + dataSource = setup.dataSource; + verificationRepository = dataSource.getRepository(Verification); + notificationRepository = dataSource.getRepository(Notification); + notificationService = new NotificationService( + verificationRepository, + notificationRepository + ); + }); + + afterAll(async () => { + await teardownTestDatabase(); + }); + + beforeEach(async () => { + await verificationRepository.clear(); + await notificationRepository.clear(); + }); + + describe("registerDevice", () => { + it("should register new device", async () => { + const registration = { + eName: "test@example.com", + deviceId: "device-123", + platform: "android" as const, + fcmToken: "fcm-token-123", + registrationTime: new Date(), + }; + + const verification = await notificationService.registerDevice(registration); + + expect(verification).toBeDefined(); + expect(verification.linkedEName).toBe("test@example.com"); + expect(verification.deviceId).toBe("device-123"); + expect(verification.platform).toBe("android"); + expect(verification.fcmToken).toBe("fcm-token-123"); + expect(verification.deviceActive).toBe(true); + expect(verification.approved).toBe(true); + }); + + it("should update existing device registration", async () => { + const registration1 = { + eName: "test@example.com", + deviceId: "device-123", + platform: "android" as const, + registrationTime: new Date(), + }; + + await notificationService.registerDevice(registration1); + + const registration2 = { + ...registration1, + fcmToken: "new-fcm-token", + platform: "ios" as const, + }; + + const verification = await notificationService.registerDevice(registration2); + + expect(verification.deviceId).toBe("device-123"); + expect(verification.platform).toBe("ios"); + expect(verification.fcmToken).toBe("new-fcm-token"); + }); + }); + + describe("unregisterDevice", () => { + it("should mark device as inactive", async () => { + const registration = { + eName: "test@example.com", + deviceId: "device-123", + platform: "android" as const, + registrationTime: new Date(), + }; + + await notificationService.registerDevice(registration); + const success = await notificationService.unregisterDevice( + "test@example.com", + "device-123" + ); + + expect(success).toBe(true); + + const devices = await notificationService.getDevicesByEName("test@example.com"); + expect(devices).toHaveLength(0); + }); + + it("should return false when device does not exist", async () => { + const success = await notificationService.unregisterDevice( + "nonexistent@example.com", + "device-123" + ); + + expect(success).toBe(false); + }); + }); + + describe("getDevicesByEName", () => { + it("should retrieve active devices for eName", async () => { + await notificationService.registerDevice({ + eName: "test@example.com", + deviceId: "device-1", + platform: "android", + registrationTime: new Date(), + }); + + await notificationService.registerDevice({ + eName: "test@example.com", + deviceId: "device-2", + platform: "ios", + registrationTime: new Date(), + }); + + const 
devices = await notificationService.getDevicesByEName("test@example.com"); + + expect(devices).toHaveLength(2); + expect(devices.map(d => d.deviceId)).toContain("device-1"); + expect(devices.map(d => d.deviceId)).toContain("device-2"); + }); + + it("should not return inactive devices", async () => { + await notificationService.registerDevice({ + eName: "test@example.com", + deviceId: "device-1", + platform: "android", + registrationTime: new Date(), + }); + + await notificationService.unregisterDevice("test@example.com", "device-1"); + + const devices = await notificationService.getDevicesByEName("test@example.com"); + expect(devices).toHaveLength(0); + }); + }); + + describe("sendNotificationToEName", () => { + it("should create notification in DB when devices exist", async () => { + await notificationService.registerDevice({ + eName: "test@example.com", + deviceId: "device-1", + platform: "android", + registrationTime: new Date(), + }); + + const success = await notificationService.sendNotificationToEName( + "test@example.com", + { + title: "Test Notification", + body: "Test body", + data: { type: "test" }, + } + ); + + expect(success).toBe(true); + + const notifications = await notificationService.getUndeliveredNotifications("test@example.com"); + expect(notifications).toHaveLength(1); + expect(notifications[0].title).toBe("Test Notification"); + expect(notifications[0].body).toBe("Test body"); + }); + + it("should return false when no active devices found", async () => { + const success = await notificationService.sendNotificationToEName( + "nonexistent@example.com", + { + title: "Test", + body: "Test body", + } + ); + + expect(success).toBe(false); + }); + }); + + describe("getUndeliveredNotifications", () => { + it("should retrieve undelivered notifications", async () => { + await notificationService.registerDevice({ + eName: "test@example.com", + deviceId: "device-1", + platform: "android", + registrationTime: new Date(), + }); + + await notificationService.sendNotificationToEName("test@example.com", { + title: "Notification 1", + body: "Body 1", + }); + + await notificationService.sendNotificationToEName("test@example.com", { + title: "Notification 2", + body: "Body 2", + }); + + const notifications = await notificationService.getUndeliveredNotifications( + "test@example.com" + ); + + expect(notifications).toHaveLength(2); + expect(notifications[0].delivered).toBe(false); + }); + + it("should not return delivered notifications", async () => { + await notificationService.registerDevice({ + eName: "test@example.com", + deviceId: "device-1", + platform: "android", + registrationTime: new Date(), + }); + + await notificationService.sendNotificationToEName("test@example.com", { + title: "Notification", + body: "Body", + }); + + const notifications = await notificationService.getUndeliveredNotifications( + "test@example.com" + ); + expect(notifications).toHaveLength(1); + + await notificationService.markNotificationAsDelivered(notifications[0].id); + + const undelivered = await notificationService.getUndeliveredNotifications( + "test@example.com" + ); + expect(undelivered).toHaveLength(0); + }); + }); + + describe("markNotificationAsDelivered", () => { + it("should mark notification as delivered", async () => { + await notificationService.registerDevice({ + eName: "test@example.com", + deviceId: "device-1", + platform: "android", + registrationTime: new Date(), + }); + + await notificationService.sendNotificationToEName("test@example.com", { + title: "Test", + body: "Body", + }); + + 
const notifications = await notificationService.getUndeliveredNotifications( + "test@example.com" + ); + await notificationService.markNotificationAsDelivered(notifications[0].id); + + const notification = await notificationRepository.findOne({ + where: { id: notifications[0].id }, + }); + + expect(notification?.delivered).toBe(true); + expect(notification?.deliveredAt).toBeDefined(); + }); + }); + + describe("getDeviceStats", () => { + it("should aggregate device statistics", async () => { + await notificationService.registerDevice({ + eName: "test1@example.com", + deviceId: "device-1", + platform: "android", + registrationTime: new Date(), + }); + + await notificationService.registerDevice({ + eName: "test2@example.com", + deviceId: "device-2", + platform: "ios", + registrationTime: new Date(), + }); + + await notificationService.registerDevice({ + eName: "test3@example.com", + deviceId: "device-3", + platform: "android", + registrationTime: new Date(), + }); + + const stats = await notificationService.getDeviceStats(); + + expect(stats.totalDevices).toBe(3); + expect(stats.devicesByPlatform.android).toBe(2); + expect(stats.devicesByPlatform.ios).toBe(1); + }); + }); +}); + diff --git a/infrastructure/evault-provisioner/src/services/NotificationService.ts b/infrastructure/evault-core/src/services/NotificationService.ts similarity index 92% rename from infrastructure/evault-provisioner/src/services/NotificationService.ts rename to infrastructure/evault-core/src/services/NotificationService.ts index 6fe17cf2..2b0ede80 100644 --- a/infrastructure/evault-provisioner/src/services/NotificationService.ts +++ b/infrastructure/evault-core/src/services/NotificationService.ts @@ -29,16 +29,20 @@ export class NotificationService { ) {} async registerDevice(registration: DeviceRegistration): Promise { - // Check if verification already exists for this eName + // Check if verification already exists for this eName AND deviceId combination let verification = await this.verificationRepository.findOne({ - where: { linkedEName: registration.eName } + where: { + linkedEName: registration.eName, + deviceId: registration.deviceId + } }); if (verification) { // Update existing verification with device info - verification.deviceId = registration.deviceId; verification.platform = registration.platform; - verification.fcmToken = registration.fcmToken; + if (registration.fcmToken) { + verification.fcmToken = registration.fcmToken; + } verification.deviceActive = true; verification.updatedAt = new Date(); } else { diff --git a/infrastructure/evault-core/src/services/ProvisioningService.spec.ts b/infrastructure/evault-core/src/services/ProvisioningService.spec.ts new file mode 100644 index 00000000..eb2c60e1 --- /dev/null +++ b/infrastructure/evault-core/src/services/ProvisioningService.spec.ts @@ -0,0 +1,320 @@ +import "reflect-metadata"; +import { + describe, + it, + expect, + beforeAll, + afterAll, + beforeEach, + vi, +} from "vitest"; +import { + ProvisioningService, + ProvisionRequest, +} from "./ProvisioningService"; +import { VerificationService } from "./VerificationService"; +import { Verification } from "../entities/Verification"; +import { + setupTestDatabase, + teardownTestDatabase, +} from "../test-utils/postgres-setup"; +import { DataSource } from "typeorm"; +import { Repository } from "typeorm"; +import axios from "axios"; +import { + createMockRegistryServer, + stopMockRegistryServer, +} from "../test-utils/mock-registry-server"; +import { FastifyInstance } from "fastify"; +// Mock generateEntropy 
for testing - we'll create tokens manually +// import { generateEntropy } from "../../../platforms/registry/src/jwt"; + +// Mock axios +vi.mock("axios"); +const mockedAxios = axios as any; + +// Mock jose at module level +vi.mock("jose", async () => { + const actual = await vi.importActual("jose"); + return { + ...actual, + jwtVerify: vi + .fn() + .mockImplementation(async (token: string, jwks: any) => { + // For mock tokens, return mock payload with entropy + // Entropy should be a random string that W3ID can use to generate UUID v5 + if ( + token === "mock.jwt.token.here" || + token.includes("mock") || + (typeof token === "string" && token !== "invalid-token") + ) { + // Use a random hex string that will work with W3ID's UUID v5 generation + // W3ID uses UUID v5 which requires a namespace and a name (entropy) + return { + payload: { entropy: "test-entropy-1234567890abcdef" }, // Random string for UUID v5 + }; + } + // For invalid tokens, throw + if (token === "invalid-token") { + throw new Error("Invalid token"); + } + // Otherwise use actual verification (will likely fail) + return (actual as any).jwtVerify(token, jwks); + }), + }; +}); + +describe("ProvisioningService", () => { + let dataSource: DataSource; + let verificationService: VerificationService; + let provisioningService: ProvisioningService; + let verificationRepository: Repository; + let mockRegistryServer: FastifyInstance; + let registryUrl: string; + + beforeAll(async () => { + const setup = await setupTestDatabase(); + dataSource = setup.dataSource; + verificationRepository = dataSource.getRepository(Verification); + verificationService = new VerificationService(verificationRepository); + + // Start mock registry server + mockRegistryServer = await createMockRegistryServer(4322); + registryUrl = "http://localhost:4322"; + process.env.PUBLIC_REGISTRY_URL = registryUrl; + process.env.REGISTRY_SHARED_SECRET = "test-secret"; + + provisioningService = new ProvisioningService(verificationService); + }); + + afterAll(async () => { + await stopMockRegistryServer(mockRegistryServer); + await teardownTestDatabase(); + delete process.env.PUBLIC_REGISTRY_URL; + delete process.env.REGISTRY_SHARED_SECRET; + }); + + beforeEach(async () => { + await verificationRepository.clear(); + vi.clearAllMocks(); + + // Mock JWKS endpoint - always return valid JWKS for mock tokens + mockedAxios.get.mockImplementation(async (url: string) => { + if (url.includes("/.well-known/jwks.json")) { + return { + data: { + keys: [ + { + kty: "EC", + crv: "P-256", + x: "test-x", + y: "test-y", + kid: "entropy-key-1", + alg: "ES256", + }, + ], + }, + }; + } + throw new Error(`Unexpected URL: ${url}`); + }); + + // Note: jose mocking needs to be at module level, so we'll handle JWT errors in tests + }); + + const createValidRequest = async (): Promise => { + // Create a mock registry entropy token (JWT format) + // In real scenarios, this would be generated by the registry + // For testing, we'll create a simple token-like string + // The actual validation will be mocked via JWKS + const token = "mock.jwt.token.here"; + return { + registryEntropy: token, + namespace: "52258594-1cd2-45f0-90cc-d34a047edf4b", + verificationId: process.env.DEMO_CODE_W3DS || "d66b7138-538a-465f-a6ce-f6985854c3f4", + publicKey: "test-public-key", + }; + }; + + describe("provisionEVault - success path", () => { + it("should successfully provision eVault with valid demo code", async () => { + const request = await createValidRequest(); + + // Mock registry registration + 
mockedAxios.post.mockResolvedValueOnce({ + status: 201, + data: { success: true }, + }); + + const result = await provisioningService.provisionEVault(request); + + expect(result.success).toBe(true); + expect(result.w3id).toBeDefined(); + expect(result.uri).toBeDefined(); + expect(mockedAxios.post).toHaveBeenCalledWith( + expect.stringContaining("/register"), + expect.objectContaining({ + ename: result.w3id, + uri: result.uri, + }), + expect.any(Object) + ); + }); + + it("should successfully provision eVault with valid verification", async () => { + // Create approved verification + const verification = await verificationService.create({ + linkedEName: undefined, + approved: true, + consumed: false, + }); + + const request = await createValidRequest(); + request.verificationId = verification.id; + + // Mock registry registration + mockedAxios.post.mockResolvedValueOnce({ + status: 201, + data: { success: true }, + }); + + const result = await provisioningService.provisionEVault(request); + + expect(result.success).toBe(true); + expect(result.w3id).toBeDefined(); + + // Verify verification was updated and consumed + const updated = await verificationService.findById(verification.id); + expect(updated?.linkedEName).toBe(result.w3id); + expect(updated?.consumed).toBe(true); + }); + + it("should generate deterministic eName for same input variables", async () => { + const request1 = await createValidRequest(); + const request2 = await createValidRequest(); + + // Ensure both requests have identical inputs + request2.registryEntropy = request1.registryEntropy; + request2.namespace = request1.namespace; + request2.verificationId = request1.verificationId; + request2.publicKey = request1.publicKey; + + // Mock registry registration for both calls + mockedAxios.post.mockResolvedValue({ + status: 201, + data: { success: true }, + }); + + const result1 = await provisioningService.provisionEVault(request1); + const result2 = await provisioningService.provisionEVault(request2); + + expect(result1.success).toBe(true); + expect(result2.success).toBe(true); + expect(result1.w3id).toBeDefined(); + expect(result2.w3id).toBeDefined(); + // Same inputs should produce the same eName + expect(result1.w3id).toBe(result2.w3id); + }); + }); + + describe("provisionEVault - error cases", () => { + it("should fail with missing required fields", async () => { + const request: Partial = { + registryEntropy: "token", + // Missing other fields + }; + + const result = await provisioningService.provisionEVault( + request as ProvisionRequest + ); + + expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + }); + + it("should fail with invalid registry entropy", async () => { + const request = await createValidRequest(); + request.registryEntropy = "invalid-token"; + + // Mock JWKS endpoint + mockedAxios.get.mockResolvedValueOnce({ + data: { + keys: [ + { + kty: "EC", + crv: "P-256", + x: "test-x", + y: "test-y", + kid: "entropy-key-1", + alg: "ES256", + }, + ], + }, + }); + + // JWT verification will fail with invalid token + const result = await provisioningService.provisionEVault(request); + + expect(result.success).toBe(false); + expect(result.message).toContain("Failed to provision"); + }); + + it("should fail when verification does not exist", async () => { + const request = await createValidRequest(); + request.verificationId = "non-existent-id"; + + const result = await provisioningService.provisionEVault(request); + + expect(result.success).toBe(false); + 
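// A rough sketch of why the "deterministic eName" test above holds: ProvisioningService
// derives the eName with W3IDBuilder, which maps the same namespace + entropy pair to the
// same UUID v5. The builder calls mirror those in provisionEVault; the literal values are
// taken from the test fixtures here and are illustrative only (run inside an async test body).
const sketchNamespace = "52258594-1cd2-45f0-90cc-d34a047edf4b"; // valid UUID from createValidRequest
const sketchEntropy = "test-entropy-1234567890abcdef";          // entropy payload the jose mock returns
const firstId = await new W3IDBuilder().withNamespace(sketchNamespace).withEntropy(sketchEntropy).withGlobal(true).build();
const secondId = await new W3IDBuilder().withNamespace(sketchNamespace).withEntropy(sketchEntropy).withGlobal(true).build();
// firstId.id === secondId.id: the same inputs always yield the same w3id.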
expect(result.message).toContain("verification doesn't exist"); + }); + + it("should fail when verification is not approved", async () => { + const verification = await verificationService.create({ + linkedEName: undefined, + approved: false, + consumed: false, + }); + + const request = await createValidRequest(); + request.verificationId = verification.id; + + const result = await provisioningService.provisionEVault(request); + + expect(result.success).toBe(false); + expect(result.message).toContain("verification not approved"); + }); + + it("should fail when verification is already consumed", async () => { + const verification = await verificationService.create({ + linkedEName: "existing@example.com", + approved: true, + consumed: true, + }); + + const request = await createValidRequest(); + request.verificationId = verification.id; + + const result = await provisioningService.provisionEVault(request); + + expect(result.success).toBe(false); + expect(result.message).toContain("already been used"); + }); + + it("should fail when registry registration fails", async () => { + const request = await createValidRequest(); + + mockedAxios.post.mockRejectedValueOnce({ + response: { + status: 500, + data: { error: "Internal server error" }, + }, + }); + + const result = await provisioningService.provisionEVault(request); + + expect(result.success).toBe(false); + expect(result.message).toContain("Failed to provision"); + }); + }); +}); diff --git a/infrastructure/evault-core/src/services/ProvisioningService.ts b/infrastructure/evault-core/src/services/ProvisioningService.ts new file mode 100644 index 00000000..bf78f035 --- /dev/null +++ b/infrastructure/evault-core/src/services/ProvisioningService.ts @@ -0,0 +1,221 @@ +import axios, { type AxiosError } from "axios"; +import * as jose from "jose"; +import { validate as uuidValidate } from "uuid"; +import { W3IDBuilder } from "w3id"; +import type { VerificationService } from "./VerificationService"; + +export interface ProvisionRequest { + registryEntropy: string; + namespace: string; + verificationId: string; + publicKey: string; +} + +export interface ProvisionResponse { + success: boolean; + uri?: string; + w3id?: string; + message?: string; + error?: string | unknown; +} + +export class ProvisioningService { + constructor(private verificationService: VerificationService) {} + + /** + * Provisions a new eVault logically (no infrastructure creation) + * @param request - Provision request containing registryEntropy, namespace, verificationId, and publicKey + * @returns Provision response with w3id (eName) and URI + */ + async provisionEVault( + request: ProvisionRequest, + ): Promise { + try { + if (!process.env.PUBLIC_REGISTRY_URL) { + throw new Error("PUBLIC_REGISTRY_URL is not set"); + } + + const { registryEntropy, namespace, verificationId, publicKey } = + request; + + if ( + !registryEntropy || + !namespace || + !verificationId || + !publicKey + ) { + return { + success: false, + error: "Missing required fields", + message: + "Missing required fields: registryEntropy, namespace, verificationId, publicKey", + }; + } + + // Verify the registry entropy token + let payload: any; + try { + const jwksResponse = await axios.get( + new URL( + `/.well-known/jwks.json`, + process.env.PUBLIC_REGISTRY_URL, + ).toString(), + ); + + const JWKS = jose.createLocalJWKSet(jwksResponse.data); + const verified = await jose.jwtVerify(registryEntropy, JWKS); + payload = verified.payload; + } catch (jwtError) { + // If JWT verification fails, re-throw with a 
clearer message + // but preserve the original error for debugging + throw new Error( + `JWT verification failed: ${ + jwtError instanceof Error + ? jwtError.message + : String(jwtError) + }`, + ); + } + + if (!uuidValidate(namespace)) { + return { + success: false, + error: "Invalid namespace", + message: "Namespace must be a valid UUID", + }; + } + + let w3id: string; + try { + const userId = await new W3IDBuilder() + .withNamespace(namespace) + .withEntropy(payload.entropy as string) + .withGlobal(true) + .build(); + w3id = userId.id; + } catch (w3idError) { + // If W3ID generation fails, it's likely an entropy format issue + // Re-throw with clearer message, but let verification errors take precedence + throw new Error( + `Failed to generate W3ID from entropy: ${ + w3idError instanceof Error + ? w3idError.message + : String(w3idError) + }`, + ); + } + + // Validate verification if not demo code + const demoCode = process.env.DEMO_CODE_W3DS || "d66b7138-538a-465f-a6ce-f6985854c3f4"; + if (verificationId !== demoCode) { + let verification; + try { + verification = + await this.verificationService.findById(verificationId); + } catch (dbError) { + // If database query fails (e.g., invalid UUID format), treat as verification not found + throw new Error("verification doesn't exist"); + } + if (!verification) { + throw new Error("verification doesn't exist"); + } + if (!verification.approved) { + throw new Error("verification not approved"); + } + if (verification.consumed) { + throw new Error("already been used"); + } + } + + // Update verification with linked eName (only if not demo code) + if (verificationId !== demoCode) { + try { + await this.verificationService.findByIdAndUpdate( + verificationId, + { + linkedEName: w3id, + consumed: true, + }, + ); + } catch (updateError) { + // If update fails, it means verification doesn't exist (should have been caught above, but handle gracefully) + throw new Error("verification doesn't exist"); + } + } + + // Generate evault ID (doesn't need entropy, generates random) + let evaultId: { id: string }; + try { + evaultId = await new W3IDBuilder().withGlobal(true).build(); + } catch (evaultIdError) { + throw new Error( + `Failed to generate evault ID: ${ + evaultIdError instanceof Error + ? evaultIdError.message + : String(evaultIdError) + }`, + ); + } + + // Build URI (IP:PORT format pointing to shared service) + const fastifyPort = + process.env.FASTIFY_PORT || process.env.PORT || 4000; + const baseUri = + process.env.EVAULT_BASE_URI || + `http://${ + process.env.EVAULT_HOST || "localhost" + }:${fastifyPort}`; + const uri = baseUri; + + // Register in registry + await axios.post( + new URL( + "/register", + process.env.PUBLIC_REGISTRY_URL, + ).toString(), + { + ename: w3id, + uri, + evault: evaultId.id, + }, + { + headers: { + Authorization: `Bearer ${process.env.REGISTRY_SHARED_SECRET}`, + }, + }, + ); + + return { + success: true, + w3id, + uri, + }; + } catch (error) { + const axiosError = error as AxiosError; + const errorMessage = + error instanceof Error ? error.message : String(error); + console.error("Provisioning error:", error); + + // Preserve specific verification-related error messages, otherwise use generic message + const verificationErrors = [ + "verification doesn't exist", + "verification not approved", + "already been used", + "PUBLIC_REGISTRY_URL", + ]; + + const isVerificationError = verificationErrors.some((err) => + errorMessage.includes(err), + ); + const message = isVerificationError + ? 
errorMessage + : "Failed to provision evault instance"; + + return { + success: false, + error: axiosError.response?.data || errorMessage, + message, + }; + } + } +} diff --git a/infrastructure/evault-core/src/services/VerificationService.spec.ts b/infrastructure/evault-core/src/services/VerificationService.spec.ts new file mode 100644 index 00000000..f3950826 --- /dev/null +++ b/infrastructure/evault-core/src/services/VerificationService.spec.ts @@ -0,0 +1,135 @@ +import "reflect-metadata"; +import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest"; +import { Repository } from "typeorm"; +import { VerificationService } from "./VerificationService"; +import { Verification } from "../entities/Verification"; +import { setupTestDatabase, teardownTestDatabase } from "../test-utils/postgres-setup"; +import { DataSource } from "typeorm"; + +describe("VerificationService", () => { + let dataSource: DataSource; + let verificationService: VerificationService; + let verificationRepository: Repository; + + beforeAll(async () => { + const setup = await setupTestDatabase(); + dataSource = setup.dataSource; + verificationRepository = dataSource.getRepository(Verification); + verificationService = new VerificationService(verificationRepository); + }); + + afterAll(async () => { + await teardownTestDatabase(); + }); + + beforeEach(async () => { + await verificationRepository.clear(); + }); + + describe("create", () => { + it("should create verification record", async () => { + const verification = await verificationService.create({ + linkedEName: "test@example.com", + deviceId: "device-123", + platform: "android", + approved: true, + consumed: false, + }); + + expect(verification).toBeDefined(); + expect(verification.linkedEName).toBe("test@example.com"); + expect(verification.deviceId).toBe("device-123"); + expect(verification.approved).toBe(true); + expect(verification.consumed).toBe(false); + expect(verification.id).toBeDefined(); + }); + }); + + describe("findById", () => { + it("should find verification by ID when it exists", async () => { + const created = await verificationService.create({ + linkedEName: "test@example.com", + approved: true, + consumed: false, + }); + + const found = await verificationService.findById(created.id); + + expect(found).toBeDefined(); + expect(found?.linkedEName).toBe("test@example.com"); + }); + + it("should return null when verification does not exist", async () => { + // Use a valid UUID format that doesn't exist + const found = await verificationService.findById("00000000-0000-0000-0000-000000000000"); + expect(found).toBeNull(); + }); + }); + + describe("findByIdAndUpdate", () => { + it("should update verification properties", async () => { + const created = await verificationService.create({ + linkedEName: "test@example.com", + approved: false, + consumed: false, + }); + + const updated = await verificationService.findByIdAndUpdate(created.id, { + approved: true, + consumed: true, + }); + + expect(updated).toBeDefined(); + expect(updated?.approved).toBe(true); + expect(updated?.consumed).toBe(true); + }); + + it("should update linkedEName", async () => { + const created = await verificationService.create({ + linkedEName: "old@example.com", + approved: true, + consumed: false, + }); + + const updated = await verificationService.findByIdAndUpdate(created.id, { + linkedEName: "new@example.com", + }); + + expect(updated?.linkedEName).toBe("new@example.com"); + }); + + it("should return null when updating non-existent verification", async () => { + 
+            // Use a valid UUID format that doesn't exist in the database
+            const updated = await verificationService.findByIdAndUpdate("00000000-0000-0000-0000-000000000000", {
+                approved: true,
+            });
+
+            expect(updated).toBeNull();
+        });
+    });
+
+    describe("findOne", () => {
+        it("should find verification by conditions", async () => {
+            await verificationService.create({
+                linkedEName: "test1@example.com",
+                approved: true,
+                consumed: false,
+            });
+
+            const found = await verificationService.findOne({
+                linkedEName: "test1@example.com",
+            });
+
+            expect(found).toBeDefined();
+            expect(found?.linkedEName).toBe("test1@example.com");
+        });
+
+        it("should return null when no match found", async () => {
+            const found = await verificationService.findOne({
+                linkedEName: "nonexistent@example.com",
+            });
+            expect(found).toBeNull();
+        });
+    });
+});
+
diff --git a/infrastructure/evault-core/src/services/VerificationService.ts b/infrastructure/evault-core/src/services/VerificationService.ts
new file mode 100644
index 00000000..a6230d82
--- /dev/null
+++ b/infrastructure/evault-core/src/services/VerificationService.ts
@@ -0,0 +1,58 @@
+import { DeepPartial, Repository } from "typeorm";
+import { Verification } from "../entities/Verification";
+
+export class VerificationService {
+    constructor(
+        private readonly verificationRepository: Repository<Verification>,
+    ) {}
+
+    async create(data: Partial<Verification>): Promise<Verification> {
+        const verification = this.verificationRepository.create(data);
+        return await this.verificationRepository.save(verification);
+    }
+
+    async findById(id: string): Promise<Verification | null> {
+        return await this.verificationRepository.findOneBy({ id });
+    }
+
+    async findByIdAndUpdate(
+        id: string,
+        data: DeepPartial<Verification>,
+    ): Promise<Verification | null> {
+        try {
+            const current = await this.findById(id);
+            if (!current) {
+                return null;
+            }
+            const toSave = this.verificationRepository.create({
+                ...current,
+                ...data,
+            });
+
+            const updated = await this.verificationRepository.save(toSave);
+            return updated;
+        } catch (error) {
+            // If findById throws an error (e.g., invalid UUID format), return null
+            return null;
+        }
+    }
+
+    async findOne(where: Partial<Verification>): Promise<Verification | null> {
+        return await this.verificationRepository.findOneBy(where);
+    }
+
+    async findManyAndCount(
+        where: Partial<Verification>,
+        relations: Record = {},
+        order: Record = {},
+        pagination: { take: number; skip: number } = { take: 10, skip: 0 },
+    ): Promise<[Verification[], number]> {
+        return await this.verificationRepository.findAndCount({
+            where,
+            relations,
+            order,
+            take: pagination.take,
+            skip: pagination.skip,
+        });
+    }
+}
diff --git a/infrastructure/evault-core/src/test-utils/mock-registry-server.ts b/infrastructure/evault-core/src/test-utils/mock-registry-server.ts
new file mode 100644
index 00000000..d9830d7c
--- /dev/null
+++ b/infrastructure/evault-core/src/test-utils/mock-registry-server.ts
@@ -0,0 +1,54 @@
+import fastify, { FastifyInstance } from "fastify";
+// Mock getJWK - we don't need to import from registry in tests
+async function mockGetJWK() {
+    return {
+        keys: [{
+            kty: "EC",
+            crv: "P-256",
+            x: "test-x",
+            y: "test-y",
+            kid: "entropy-key-1",
+            alg: "ES256",
+        }],
+    };
+}
+
+export async function createMockRegistryServer(port: number = 4322): Promise<FastifyInstance> {
+    const server = fastify({ logger: false });
+
+    // Mock endpoints that evault-core calls
+    server.get("/.well-known/jwks.json", async () => {
+        return await mockGetJWK();
+    });
+
+    server.post("/register", async (request, reply) => {
+        const authHeader = request.headers.authorization;
+        if (!authHeader ||
!authHeader.startsWith("Bearer ")) { + return reply.status(401).send({ error: "Unauthorized" }); + } + + const { ename, uri, evault } = request.body as any; + if (!ename || !uri || !evault) { + return reply.status(400).send({ error: "Missing required fields" }); + } + return reply.status(201).send({ ename, uri, evault }); + }); + + server.get("/platforms", async () => { + return [ + "http://localhost:1111", + "http://localhost:3000", + ]; + }); + + await server.listen({ port, host: "0.0.0.0" }); + + return server; +} + +export async function stopMockRegistryServer(server: FastifyInstance | undefined): Promise { + if (server) { + await server.close(); + } +} + diff --git a/infrastructure/evault-core/src/test-utils/neo4j-setup.ts b/infrastructure/evault-core/src/test-utils/neo4j-setup.ts new file mode 100644 index 00000000..57fbdfa4 --- /dev/null +++ b/infrastructure/evault-core/src/test-utils/neo4j-setup.ts @@ -0,0 +1,34 @@ +import neo4j, { Driver } from "neo4j-driver"; +import { Neo4jContainer, StartedNeo4jContainer } from "@testcontainers/neo4j"; + +let container: StartedNeo4jContainer | null = null; +let driver: Driver | null = null; + +export async function setupTestNeo4j(): Promise<{ container: StartedNeo4jContainer; driver: Driver }> { + if (container && driver) { + return { container, driver }; + } + + container = await new Neo4jContainer("neo4j:5.15").start(); + + const username = container.getUsername(); + const password = container.getPassword(); + const boltPort = container.getMappedPort(7687); + const uri = `bolt://localhost:${boltPort}`; + + driver = neo4j.driver(uri, neo4j.auth.basic(username, password)); + + return { container, driver }; +} + +export async function teardownTestNeo4j(): Promise { + if (driver) { + await driver.close(); + driver = null; + } + if (container) { + await container.stop(); + container = null; + } +} + diff --git a/infrastructure/evault-core/src/test-utils/postgres-setup.ts b/infrastructure/evault-core/src/test-utils/postgres-setup.ts new file mode 100644 index 00000000..5635522a --- /dev/null +++ b/infrastructure/evault-core/src/test-utils/postgres-setup.ts @@ -0,0 +1,46 @@ +import "reflect-metadata"; +import { DataSource } from "typeorm"; +import { PostgreSqlContainer, StartedPostgreSqlContainer } from "@testcontainers/postgresql"; +import { Verification } from "../entities/Verification"; +import { Notification } from "../entities/Notification"; + +let container: StartedPostgreSqlContainer | null = null; +let dataSource: DataSource | null = null; + +export async function setupTestDatabase(): Promise<{ container: StartedPostgreSqlContainer; dataSource: DataSource }> { + if (container && dataSource?.isInitialized) { + return { container, dataSource }; + } + + container = await new PostgreSqlContainer("postgres:15-alpine") + .withDatabase("test_evault") + .withUsername("test") + .withPassword("test") + .start(); + + const connectionUrl = container.getConnectionUri(); + + dataSource = new DataSource({ + type: "postgres", + url: connectionUrl, + synchronize: true, + logging: false, + entities: [Verification, Notification], + }); + + await dataSource.initialize(); + + return { container, dataSource }; +} + +export async function teardownTestDatabase(): Promise { + if (dataSource?.isInitialized) { + await dataSource.destroy(); + dataSource = null; + } + if (container) { + await container.stop(); + container = null; + } +} + diff --git a/infrastructure/evault-core/src/test-utils/test-setup.ts b/infrastructure/evault-core/src/test-utils/test-setup.ts new 
file mode 100644 index 00000000..0f463974 --- /dev/null +++ b/infrastructure/evault-core/src/test-utils/test-setup.ts @@ -0,0 +1,7 @@ +// Import reflect-metadata for TypeORM decorators +import "reflect-metadata"; + +// Configure testcontainers to use Docker socket directly +process.env.TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE = process.env.TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE || "/var/run/docker.sock"; +process.env.TESTCONTAINERS_RYUK_DISABLED = process.env.TESTCONTAINERS_RYUK_DISABLED || "false"; + diff --git a/infrastructure/evault-provisioner/src/utils/eventEmitter.ts b/infrastructure/evault-core/src/utils/eventEmitter.ts similarity index 100% rename from infrastructure/evault-provisioner/src/utils/eventEmitter.ts rename to infrastructure/evault-core/src/utils/eventEmitter.ts diff --git a/infrastructure/evault-provisioner/src/utils/hmac.ts b/infrastructure/evault-core/src/utils/hmac.ts similarity index 100% rename from infrastructure/evault-provisioner/src/utils/hmac.ts rename to infrastructure/evault-core/src/utils/hmac.ts diff --git a/infrastructure/evault-core/tests/log-storage.spec.ts b/infrastructure/evault-core/tests/log-storage.spec.ts deleted file mode 100644 index 91abaed9..00000000 --- a/infrastructure/evault-core/tests/log-storage.spec.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { describe, it, expect, beforeAll, afterAll } from "vitest"; -import neo4j, { Driver } from "neo4j-driver"; -import { Neo4jContainer } from "@testcontainers/neo4j"; -import { Neo4jLogStorage } from "../src/w3id/log-storage"; -import { LogEvent } from "w3id"; - -describe("Neo4jLogStorage", () => { - let container; - let driver: Driver; - let storage: Neo4jLogStorage; - - beforeAll(async () => { - container = await new Neo4jContainer("neo4j:5.15").start(); - const uri = `bolt://localhost:${container.getMappedPort(7687)}`; - driver = neo4j.driver( - uri, - neo4j.auth.basic(container.getUsername(), container.getPassword()) - ); - storage = new Neo4jLogStorage(driver); - }); - - afterAll(async () => { - await driver.close(); - await container.stop(); - }); - - it("should create and retrieve a log event", async () => { - const logEvent: LogEvent = { - id: "test-id", - versionId: "0-test", - versionTime: new Date(), - updateKeys: ["key1", "key2"], - nextKeyHashes: ["hash1", "hash2"], - method: "w3id:v0.0.0", - }; - - const created = await storage.create(logEvent); - expect(created.id).toBe(logEvent.id); - expect(created.versionId).toBe(logEvent.versionId); - expect(created.updateKeys).toEqual(logEvent.updateKeys); - expect(created.nextKeyHashes).toEqual(logEvent.nextKeyHashes); - expect(created.method).toBe(logEvent.method); - - const retrieved = await storage.findOne({ id: logEvent.id }); - expect(retrieved.id).toBe(logEvent.id); - expect(retrieved.versionId).toBe(logEvent.versionId); - expect(retrieved.updateKeys).toEqual(logEvent.updateKeys); - expect(retrieved.nextKeyHashes).toEqual(logEvent.nextKeyHashes); - expect(retrieved.method).toBe(logEvent.method); - }); - - it("should find multiple log events", async () => { - const logEvent1: LogEvent = { - id: "test-id-1", - versionId: "0-test-1", - versionTime: new Date(), - updateKeys: ["key1"], - nextKeyHashes: ["hash1"], - method: "w3id:v0.0.0", - }; - - const logEvent2: LogEvent = { - id: "test-id-2", - versionId: "0-test-2", - versionTime: new Date(), - updateKeys: ["key2"], - nextKeyHashes: ["hash2"], - method: "w3id:v0.0.0", - }; - - await storage.create(logEvent1); - await storage.create(logEvent2); - - const events = await storage.findMany({ method: 
"w3id:v0.0.0" }); - expect(events.length).toBeGreaterThanOrEqual(2); - expect(events.some((e) => e.id === logEvent1.id)).toBe(true); - expect(events.some((e) => e.id === logEvent2.id)).toBe(true); - }); - - it("should throw error when log event not found", async () => { - await expect(storage.findOne({ id: "non-existent-id" })).rejects.toThrow( - "No log event found with id non-existent-id" - ); - }); -}); diff --git a/infrastructure/evault-core/tests/utils/mock-signer.ts b/infrastructure/evault-core/tests/utils/mock-signer.ts deleted file mode 100644 index 995827b4..00000000 --- a/infrastructure/evault-core/tests/utils/mock-signer.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Signer } from "../../src/types/w3id"; - -export function createMockSigner(alg: string = "ed25519"): Signer { - return { - sign: async (message: string): Promise => { - // Mock signature - in a real implementation this would be a proper signature - return Buffer.from(message).toString("base64url"); - }, - pubKey: "mock-public-key", - alg, - }; -} diff --git a/infrastructure/evault-core/tests/utils/mock-storage.ts b/infrastructure/evault-core/tests/utils/mock-storage.ts deleted file mode 100644 index 2a7f4b8b..00000000 --- a/infrastructure/evault-core/tests/utils/mock-storage.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { StorageSpec } from "../../src/types/w3id"; - -export class MockStorage implements StorageSpec { - private store: Map = new Map(); - private dataStore: Map = new Map(); - - async get(key: string): Promise { - return this.store.get(key) ?? null; - } - - async set(key: string, value: string): Promise { - this.store.set(key, value); - } - - async delete(key: string): Promise { - this.store.delete(key); - } - - async list(prefix: string): Promise { - return Array.from(this.store.keys()).filter((key) => - key.startsWith(prefix), - ); - } - - async create(data: T): Promise { - const id = Math.random().toString(36).substring(7); - this.dataStore.set(id, data); - return data as unknown as U; - } - - async findOne(query: Partial): Promise { - for (const [_, data] of this.dataStore) { - if (this.matchesQuery(data, query)) { - return data as unknown as U; - } - } - return null; - } - - async findMany(query: Partial): Promise { - const results: U[] = []; - for (const [_, data] of this.dataStore) { - if (this.matchesQuery(data, query)) { - results.push(data as unknown as U); - } - } - return results; - } - - private matchesQuery(data: T, query: Partial): boolean { - return Object.entries(query).every(([key, value]) => { - return (data as any)[key] === value; - }); - } - - clear(): void { - this.store.clear(); - this.dataStore.clear(); - } -} diff --git a/infrastructure/evault-core/tsconfig.json b/infrastructure/evault-core/tsconfig.json index 826c6392..87ff34e2 100644 --- a/infrastructure/evault-core/tsconfig.json +++ b/infrastructure/evault-core/tsconfig.json @@ -1,18 +1,19 @@ { - "compilerOptions": { - "target": "ES2017", - "module": "ESNext", - "lib": ["ESNext", "DOM"], - "declaration": true, - "declarationDir": "./dist/types", - "outDir": "./dist", - "rootDir": "./src", - "strict": true, - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "moduleResolution": "Node", - "skipLibCheck": true - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist"] -} \ No newline at end of file + "compilerOptions": { + "target": "ES2020", + "module": "commonjs", + "moduleResolution": "node", + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, 
+ "outDir": "dist", + "rootDir": "src", + "sourceMap": true, + "declaration": true, + "experimentalDecorators": true, + "emitDecoratorMetadata": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +} \ No newline at end of file diff --git a/infrastructure/evault-core/vitest.config.ts b/infrastructure/evault-core/vitest.config.ts index 0fb5c2c2..4ec0e3ba 100644 --- a/infrastructure/evault-core/vitest.config.ts +++ b/infrastructure/evault-core/vitest.config.ts @@ -1,11 +1,22 @@ -import { defineConfig } from "vitest/config"; +import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { - globals: true, - environment: "node", - testTimeout: 60000, - hookTimeout: 60000, - watch: false, + glob: ['**/*.{test,spec}.{ts,tsx}'], + environment: 'node', + setupFiles: ['./src/test-utils/test-setup.ts'], + testTimeout: 120000, // 120 seconds for testcontainers + hookTimeout: 120000, // 120 seconds for hooks (beforeAll, afterAll) + coverage: { + provider: 'v8', + reporter: ['text', 'json', 'html'], + exclude: [ + 'node_modules/', + 'dist/', + '**/*.d.ts', + '**/migrations/**', + ], + }, }, }); + diff --git a/infrastructure/evault-provisioner/README.md b/infrastructure/evault-provisioner/README.md deleted file mode 100644 index 6546befb..00000000 --- a/infrastructure/evault-provisioner/README.md +++ /dev/null @@ -1,188 +0,0 @@ -# Evault Provisioner - -A TypeScript API for provisioning evault instances on Nomad. This service allows you to spin up evault instances with Neo4j backends for different tenants. - -## Prerequisites - -- Node.js 18+ -- Docker -- Nomad (see setup instructions below) -- OrbStack (for macOS users) - -## Nomad Setup - -### macOS Setup (using OrbStack) - -Due to CNI bridge plugin requirements, running Nomad on macOS is best done through OrbStack: - -1. Install OrbStack: https://orbstack.dev/ -2. Create a new VM in OrbStack -3. SSH into the VM and install Nomad: - -```bash -# Install Nomad -curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add - -sudo apt-add-repository "deb [arch=amd64] https://apt.releases.hashicorp.com $(lsb_release -cs) main" -sudo apt-get update && sudo apt-get install nomad - -# Install CNI plugins -sudo mkdir -p /opt/cni/bin -curl -L https://github.com/containernetworking/plugins/releases/download/v1.3.0/cni-plugins-linux-amd64-v1.3.0.tgz | sudo tar -C /opt/cni/bin -xz -``` - -4. Start Nomad in dev mode: - -```bash -sudo nomad agent -dev -network-interface=eth0 -log-level=DEBUG -bind=0.0.0.0 -``` - -### Linux Setup - -1. Install Nomad: - -```bash -# Install Nomad -curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add - -sudo apt-add-repository "deb [arch=amd64] https://apt.releases.hashicorp.com $(lsb_release -cs) main" -sudo apt-get update && sudo apt-get install nomad - -# Install CNI plugins -sudo mkdir -p /opt/cni/bin -curl -L https://github.com/containernetworking/plugins/releases/download/v1.3.0/cni-plugins-linux-amd64-v1.3.0.tgz | sudo tar -C /opt/cni/bin -xz -``` - -2. Start Nomad in dev mode: - -```bash -sudo nomad agent -dev -network-interface=eth0 -log-level=DEBUG -bind=0.0.0.0 -``` - -## Project Setup - -1. Install dependencies: - -```bash -npm install -``` - -2. Build the project: - -```bash -npm run build -``` - -3. Start the server: - -```bash -npm start -``` - -For development with auto-reload: - -```bash -npm run dev -``` - -## API Endpoints - -### Health Check - -``` -GET /health -``` - -Returns the health status of the API. 
- -### Provision Evault - -``` -POST /provision -``` - -Provisions a new evault instance for a tenant. - -Request body: - -```json -{ - "tenantId": "your-tenant-id" -} -``` - -Response: - -```json -{ - "success": true, - "message": "Successfully provisioned evault for tenant your-tenant-id", - "jobName": "evault-your-tenant-id" -} -``` - -## Architecture - -The provisioner creates a Nomad job that consists of two tasks: - -1. **Neo4j Task**: - - - Runs Neo4j 5.15 - - Exposes ports: 7687 (bolt) and 7474 (browser) - - Uses dynamic ports for flexibility - - 2GB memory allocation - -2. **Evault Task**: - - Runs the evault application - - Connects to Neo4j via localhost - - Uses dynamic port allocation - - 512MB memory allocation - - Depends on Neo4j task - -## Environment Variables - -- `PORT` - Port to run the API on (default: 3000) -- `NOMAD_ADDR` - Nomad API address (default: http://localhost:4646) - -## Troubleshooting - -### Common Issues - -1. **Port Allocation Issues**: - - - Ensure Nomad is running with CNI plugins installed - - Check that the network interface is correctly specified - - Verify that ports are not already in use - -2. **Container Networking**: - - - Ensure Docker is running - - Check that the bridge network is properly configured - - Verify container-to-container communication - -3. **Nomad Job Failures**: - - Check Nomad logs for detailed error messages - - Verify that all required images are available - - Ensure resource allocations are sufficient - -### Debugging - -To debug Nomad issues: - -```bash -# View Nomad logs -journalctl -u nomad -f - -# Check Nomad status -nomad status - -# View specific job details -nomad job status evault- - -# View allocation details -nomad alloc status -``` - -## Development - -The project uses TypeScript for type safety and better development experience. The source files are in the `src` directory and are compiled to the `dist` directory. - -For development, you can use `npm run dev` which uses `tsx` to run the TypeScript files directly without compilation. 
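With the standalone provisioner retired, logical provisioning is handled by `ProvisioningService` and `ProvisioningController` on the evault-core Express app. A minimal client sketch follows; the `POST /provision` path and port 3001 are assumptions carried over from the removed provisioner, and all field values are placeholders.

```ts
// Hypothetical client call against the consolidated provisioning API.
// Assumes the controller still exposes POST /provision on the Express port (3001).
async function provisionDemoEVault() {
    const response = await fetch("http://localhost:3001/provision", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
            registryEntropy: "<jwt issued by the registry>",   // verified against /.well-known/jwks.json
            namespace: "52258594-1cd2-45f0-90cc-d34a047edf4b", // any valid UUID
            verificationId: "<approved verification id or demo code>",
            publicKey: "<device public key>",
        }),
    });
    // On success the service responds with { success: true, w3id, uri }.
    return await response.json();
}
```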
diff --git a/infrastructure/evault-provisioner/package.json b/infrastructure/evault-provisioner/package.json deleted file mode 100644 index ffe10dd9..00000000 --- a/infrastructure/evault-provisioner/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "evault-provisioner", - "version": "1.0.0", - "description": "API for provisioning evault instances on Nomad", - "main": "dist/index.js", - "scripts": { - "start": "node dist/index.js", - "dev": "ts-node-dev --respawn --transpile-only src/index.ts", - "build": "tsc", - "test": "vitest", - "typeorm": "typeorm-ts-node-commonjs", - "migration:generate": "npm run typeorm migration:generate -- -d src/config/database.ts", - "migration:run": "npm run typeorm migration:run -- -d src/config/database.ts", - "migration:revert": "npm run typeorm migration:revert -- -d src/config/database.ts" - }, - "dependencies": { - "@kubernetes/client-node": "^1.3.0", - "axios": "^1.6.7", - "cors": "^2.8.5", - "dotenv": "^16.4.5", - "express": "^4.18.2", - "jose": "^5.2.2", - "pg": "^8.11.3", - "reflect-metadata": "^0.2.1", - "sha256": "^0.2.0", - "typeorm": "^0.3.24", - "w3id": "workspace:*" - }, - "devDependencies": { - "@types/cors": "^2.8.18", - "@types/express": "^4.17.21", - "@types/node": "^20.11.24", - "@types/sha256": "^0.2.2", - "nodemon": "^3.0.3", - "ts-node-dev": "^2.0.0", - "tsx": "^4.7.1", - "typescript": "^5.3.3", - "vitest": "^1.3.1" - } -} diff --git a/infrastructure/evault-provisioner/src/index.ts b/infrastructure/evault-provisioner/src/index.ts deleted file mode 100644 index 3779396a..00000000 --- a/infrastructure/evault-provisioner/src/index.ts +++ /dev/null @@ -1,192 +0,0 @@ -import "reflect-metadata"; -import express, { Request, Response } from "express"; -import axios, { AxiosError } from "axios"; -import { provisionEVault } from "./templates/evault.nomad"; -import dotenv from "dotenv"; -import { W3IDBuilder } from "w3id"; -import * as jose from "jose"; -import path from "path"; -import { createHmacSignature } from "./utils/hmac"; -import cors from "cors"; -import { AppDataSource } from "./config/database"; -import { VerificationService } from "./services/VerificationService"; -import { VerificationController } from "./controllers/VerificationController"; -import { NotificationController } from "./controllers/NotificationController"; - -dotenv.config({ path: path.resolve(__dirname, "../../../.env") }); - -const app = express(); -const port = process.env.PORT || 3001; - -// Configure CORS for SSE -app.use( - cors({ - origin: "*", - methods: ["GET", "POST", "OPTIONS", "PATCH"], - allowedHeaders: ["Content-Type", "Authorization"], - credentials: true, - }) -); - -// Increase JSON payload limit to 50MB -app.use(express.json({ limit: "50mb" })); -// Increase URL-encoded payload limit to 50MB -app.use(express.urlencoded({ limit: "50mb", extended: true })); - -// Initialize database connection -const initializeDatabase = async () => { - try { - await AppDataSource.initialize(); - console.log("Database connection initialized"); - } catch (error) { - console.error("Error during database initialization:", error); - process.exit(1); - } -}; - -// Initialize services and controllers -const verificationService = new VerificationService( - AppDataSource.getRepository("Verification") -); -const verificationController = new VerificationController(verificationService); -const notificationController = new NotificationController(); - -interface ProvisionRequest { - registryEntropy: string; - namespace: string; - verificationId: string; - publicKey: string; -} 
- -interface ProvisionResponse { - success: boolean; - uri?: string; - w3id?: string; - message?: string; - error?: string | unknown; -} - -// Health check endpoint -app.get("/health", (req: Request, res: Response) => { - res.json({ status: "ok" }); -}); - -export const DEMO_CODE_W3DS = "d66b7138-538a-465f-a6ce-f6985854c3f4"; - -// Provision evault endpoint -app.post( - "/provision", - async ( - req: Request<{}, {}, ProvisionRequest>, - res: Response - ) => { - try { - console.log("provisioner log 1"); - if (!process.env.PUBLIC_REGISTRY_URL) - throw new Error("PUBLIC_REGISTRY_URL is not set"); - const { registryEntropy, namespace, verificationId, publicKey } = req.body; - if (!registryEntropy || !namespace || !verificationId || !publicKey) { - return res.status(400).json({ - success: false, - error: "registryEntropy and namespace are required", - message: - "Missing required fields: registryEntropy, namespace, verifficationId, publicKey", - }); - } - - console.log("provisioner log 2"); - - const jwksResponse = await axios.get( - new URL( - `/.well-known/jwks.json`, - process.env.PUBLIC_REGISTRY_URL - ).toString() - ); - - const JWKS = jose.createLocalJWKSet(jwksResponse.data); - const { payload } = await jose.jwtVerify(registryEntropy, JWKS); - - console.log("provisioner log 3"); - - const userId = await new W3IDBuilder() - .withNamespace(namespace) - .withEntropy(payload.entropy as string) - .withGlobal(true) - .build(); - - const w3id = userId.id; - - if (verificationId !== DEMO_CODE_W3DS) { - const verification = await verificationService.findById( - verificationId - ); - if (!verification) - throw new Error("verification doesn't exist"); - if (!verification.approved) - throw new Error("verification not approved"); - if (verification.consumed) - throw new Error( - "This verification ID has already been used" - ); - } - await verificationService.findByIdAndUpdate(verificationId, { linkedEName: w3id }); - const evaultId = await new W3IDBuilder().withGlobal(true).build(); - const uri = await provisionEVault( - w3id, - process.env.PUBLIC_REGISTRY_URL, - publicKey - ); - await axios.post( - new URL( - "/register", - process.env.PUBLIC_REGISTRY_URL - ).toString(), - { - ename: w3id, - uri, - evault: evaultId.id, - }, - { - headers: { - Authorization: `Bearer ${process.env.REGISTRY_SHARED_SECRET}`, - }, - } - ); - - res.json({ - success: true, - w3id, - uri, - }); - } catch (error) { - const axiosError = error as AxiosError; - console.error(error); - res.status(500).json({ - success: false, - error: axiosError.response?.data || axiosError.message, - message: "Failed to provision evault instance", - }); - } - } -); - -// Register verification routes -verificationController.registerRoutes(app); - -// Register notification routes -notificationController.registerRoutes(app); - -// Start the server -const start = async () => { - try { - await initializeDatabase(); - app.listen(port, () => { - console.log(`Evault Provisioner API running on port ${port}`); - }); - } catch (err) { - console.error(err); - process.exit(1); - } -}; - -start(); diff --git a/infrastructure/evault-provisioner/src/templates/evault.nomad.ts b/infrastructure/evault-provisioner/src/templates/evault.nomad.ts deleted file mode 100644 index 8fe41048..00000000 --- a/infrastructure/evault-provisioner/src/templates/evault.nomad.ts +++ /dev/null @@ -1,227 +0,0 @@ -import sha256 from "sha256"; -import * as k8s from "@kubernetes/client-node"; -import { execSync } from "child_process"; -import { json } from "express"; - -/** - * 
Generates a cryptographically secure random alphanumeric password of the specified length. - * - * @param length - The desired length of the generated password. Defaults to 16. - * @returns A random password consisting of uppercase letters, lowercase letters, and digits. - */ -export function generatePassword(length = 16): string { - const chars = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - let result = ""; - const charsLength = chars.length; - const randomValues = new Uint32Array(length); - - crypto.getRandomValues(randomValues); - - for (let i = 0; i < length; i++) { - result += chars.charAt(randomValues[i] % charsLength); - } - - return result; -} - -/** - * Provisions an eVault environment in a dedicated Kubernetes namespace and returns its accessible URL. - * - * Creates a namespace, persistent volume claims, a deployment with Neo4j and eVault containers, and a LoadBalancer service. The Neo4j password is derived by hashing the domain part of the provided {@link w3id}. The function determines the service endpoint using the LoadBalancer IP/hostname, node IP and NodePort, or Minikube IP as a fallback. - * - * @param w3id - The W3ID identifier, used to derive the namespace and database password. - * @param eVaultId - The unique identifier for the eVault instance. - * @returns The HTTP URL for accessing the provisioned eVault service. - * - * @throws {Error} If the service endpoint cannot be determined from the cluster. - */ -export async function provisionEVault(w3id: string, registryUrl: string, publicKey: string) { - console.log("starting to provision"); - const idParts = w3id.split("@"); - w3id = idParts[idParts.length - 1]; - const neo4jPassword = sha256(w3id); - - const kc = new k8s.KubeConfig(); - kc.loadFromDefault(); - - const coreApi = kc.makeApiClient(k8s.CoreV1Api); - const appsApi = kc.makeApiClient(k8s.AppsV1Api); - - const namespaceName = `evault-${w3id}`; - const containerPort = 4000; - - const namespace = await coreApi.createNamespace({ - body: { metadata: { name: namespaceName } }, - }); - - const pvcSpec = (name: string) => ({ - metadata: { name, namespace: namespaceName }, - spec: { - accessModes: ["ReadWriteOnce"], - resources: { requests: { storage: "1Gi" } }, - }, - }); - await coreApi.createNamespacedPersistentVolumeClaim({ - namespace: namespaceName, - body: pvcSpec("neo4j-data"), - }); - await coreApi.createNamespacedPersistentVolumeClaim({ - namespace: namespaceName, - body: pvcSpec("evault-store"), - }); - await coreApi.createNamespacedPersistentVolumeClaim({ - namespace: namespaceName, - body: { - metadata: { name: "evault-secrets", namespace: namespaceName }, - spec: { - accessModes: ["ReadWriteOnce"], - resources: { - requests: { - storage: "2Mi", - }, - }, - }, - }, - }); - - const deployment = { - metadata: { name: "evault", namespace: namespaceName }, - spec: { - replicas: 1, - selector: { matchLabels: { app: "evault" } }, - template: { - metadata: { labels: { app: "evault" } }, - spec: { - containers: [ - { - name: "neo4j", - image: "neo4j:5.15", - ports: [{ containerPort: 7687 }], - env: [ - { - name: "NEO4J_AUTH", - value: `neo4j/${neo4jPassword}`, - }, - { - name: "dbms.connector.bolt.listen_address", - value: "0.0.0.0:7687", - }, - ], - volumeMounts: [ - { name: "neo4j-data", mountPath: "/data" }, - ], - }, - { - name: "evault", - image: "merulauvo/evault:latest", - // image: "local-evault:latest", - // imagePullPolicy: "Never", - ports: [{ containerPort }], - env: [ - { - name: "NEO4J_URI", - value: "bolt://localhost:7687", 
- }, - { name: "NEO4J_USER", value: "neo4j" }, - { - name: "REGISTRY_URL", - value: registryUrl, - }, - { - name: "EVAULT_PUBLIC_KEY", - value: publicKey - }, - { - name: "NEO4J_PASSWORD", - value: neo4jPassword, - }, - { - name: "PORT", - value: containerPort.toString(), - }, - { name: "W3ID", value: w3id }, - { - name: "ENCRYPTION_PASSWORD", - value: neo4jPassword, - }, - { - name: "SECRETS_STORE_PATH", - value: "/secrets", - }, - ], - volumeMounts: [ - { - name: "evault-store", - mountPath: "/evault/data", - }, - ], - }, - ], - volumes: [ - { - name: "neo4j-data", - persistentVolumeClaim: { claimName: "neo4j-data" }, - }, - { - name: "evault-store", - persistentVolumeClaim: { - claimName: "evault-store", - }, - }, - { - name: "evault-secrets", - persistentVolumeClaim: { - claimName: "evault-secrets", - }, - }, - ], - }, - }, - }, - }; - - await appsApi.createNamespacedDeployment({ - body: deployment, - namespace: namespaceName, - }); - - await coreApi.createNamespacedService({ - namespace: namespaceName, - body: { - apiVersion: "v1", - kind: "Service", - metadata: { name: "evault-service" }, - spec: { - type: "NodePort", - selector: { app: "evault" }, - ports: [ - { - port: 4000, - targetPort: 4000, - }, - ], - }, - }, - }); - - // Get the service and node info - const svc = await coreApi.readNamespacedService({ - name: "evault-service", - namespace: namespaceName, - }); - const nodePort = svc.spec?.ports?.[0]?.nodePort; - if (!nodePort) throw new Error("No NodePort assigned"); - - // Get the node's external IP - const nodes = await coreApi.listNode(); - const node = nodes.items[0]; - if (!node) throw new Error("No nodes found in cluster"); - - let externalIP = node.status?.addresses?.find( - (addr) => addr.type === "ExternalIP" - )?.address; - - if (!externalIP) externalIP = process.env.IP_ADDR; - return `http://${externalIP}:${nodePort}`; -} diff --git a/infrastructure/evault-provisioner/test-notification.js b/infrastructure/evault-provisioner/test-notification.js deleted file mode 100644 index 62c9e756..00000000 --- a/infrastructure/evault-provisioner/test-notification.js +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env node - -/** - * Test script for sending notifications via evault-provisioner - * - * Usage: - * 1. Set NOTIFICATION_SHARED_SECRET in your .env file - * 2. Run: node test-notification.js - * 3. 
Or use curl commands below - */ - -import axios from 'axios'; - -const PROVISIONER_URL = process.env.PROVISIONER_URL || 'http://localhost:3001'; -const SHARED_SECRET = process.env.NOTIFICATION_SHARED_SECRET || 'your-secret-here'; - -// Test data - replace with actual eName from your database -const TEST_ENAME = 'test@example.com'; // Replace with actual eName -const TEST_NOTIFICATION = { - title: 'Test Notification', - body: 'This is a test notification from evault-provisioner!', - data: { - type: 'test', - timestamp: new Date().toISOString() - } -}; - -async function testNotification() { - try { - console.log('🚀 Testing notification system...'); - console.log(`📡 Sending to: ${PROVISIONER_URL}/api/notifications/send`); - console.log(`👤 Target eName: ${TEST_ENAME}`); - console.log(`📝 Notification:`, TEST_NOTIFICATION); - - const response = await axios.post(`${PROVISIONER_URL}/api/notifications/send`, { - eName: TEST_ENAME, - notification: TEST_NOTIFICATION, - sharedSecret: SHARED_SECRET - }); - - console.log('✅ Success!', response.data); - } catch (error: any) { - console.error('❌ Error:', error.response?.data || error.message); - } -} - -async function getDeviceStats() { - try { - console.log('\n📊 Getting device stats...'); - const response = await axios.get(`${PROVISIONER_URL}/api/devices/stats`); - console.log('📈 Device Stats:', response.data); - } catch (error: any) { - console.error('❌ Error getting stats:', error.response?.data || error.message); - } -} - -// Run tests -async function main() { - await testNotification(); - await getDeviceStats(); -} - -main(); diff --git a/infrastructure/evault-provisioner/test-real-notifications.js b/infrastructure/evault-provisioner/test-real-notifications.js deleted file mode 100644 index 124dd351..00000000 --- a/infrastructure/evault-provisioner/test-real-notifications.js +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env node - -/** - * Test script for the REAL notification system - * - * This shows how notifications are actually stored and retrieved - */ - -import axios from 'axios'; - -const PROVISIONER_URL = process.env.PROVISIONER_URL || 'http://localhost:3001'; -const SHARED_SECRET = process.env.NOTIFICATION_SHARED_SECRET || 'your-secret-here'; - -// Test data -const TEST_ENAME = 'test@example.com'; - -async function testRealNotificationFlow() { - try { - console.log('🧪 Testing REAL notification flow...\n'); - - // Step 1: Send a notification (this stores it in the database) - console.log('1️⃣ Sending notification (storing in database)...'); - const sendResponse = await axios.post(`${PROVISIONER_URL}/api/notifications/send`, { - eName: TEST_ENAME, - notification: { - title: 'Hello from Provisioner!', - body: 'This notification was stored in the database and will be delivered when you check.', - data: { - type: 'test', - timestamp: new Date().toISOString(), - source: 'provisioner-test' - } - }, - sharedSecret: SHARED_SECRET - }); - - console.log('✅ Notification sent:', sendResponse.data); - console.log('📝 Notification is now stored in the database\n'); - - // Step 2: Check for notifications (this retrieves and marks as delivered) - console.log('2️⃣ Checking for notifications (retrieving from database)...'); - const checkResponse = await axios.post(`${PROVISIONER_URL}/api/notifications/check`, { - eName: TEST_ENAME, - deviceId: 'test-device-123' - }); - - console.log('✅ Notifications retrieved:', checkResponse.data); - console.log('📱 These notifications should now appear in the eid-wallet app!\n'); - - // Step 3: Check again (should be empty now) 
- console.log('3️⃣ Checking again (should be empty now)...'); - const checkAgainResponse = await axios.post(`${PROVISIONER_URL}/api/notifications/check`, { - eName: TEST_ENAME, - deviceId: 'test-device-123' - }); - - console.log('✅ Second check result:', checkAgainResponse.data); - console.log('🎉 Notification system working correctly!'); - - } catch (error: any) { - console.error('❌ Error:', error.response?.data || error.message); - } -} - -// Run the test -testRealNotificationFlow(); diff --git a/infrastructure/evault-provisioner/tsconfig.json b/infrastructure/evault-provisioner/tsconfig.json deleted file mode 100644 index 87ff34e2..00000000 --- a/infrastructure/evault-provisioner/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "compilerOptions": { - "target": "ES2020", - "module": "commonjs", - "moduleResolution": "node", - "esModuleInterop": true, - "strict": true, - "skipLibCheck": true, - "forceConsistentCasingInFileNames": true, - "outDir": "dist", - "rootDir": "src", - "sourceMap": true, - "declaration": true, - "experimentalDecorators": true, - "emitDecoratorMetadata": true - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist"] -} \ No newline at end of file diff --git a/infrastructure/web3-adapter/src/db/index.js b/infrastructure/web3-adapter/src/db/index.js index c8fda5d5..1b7d0ad0 100644 --- a/infrastructure/web3-adapter/src/db/index.js +++ b/infrastructure/web3-adapter/src/db/index.js @@ -1,18 +1,39 @@ "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; +var __createBinding = + (this && this.__createBinding) || + (Object.create + ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if ( + !desc || + ("get" in desc + ? !m.__esModule + : desc.writable || desc.configurable) + ) { + desc = { + enumerable: true, + get: function () { + return m[k]; + }, + }; + } + Object.defineProperty(o, k2, desc); + } + : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); +var __exportStar = + (this && this.__exportStar) || + function (m, exports) { + for (var p in m) + if ( + p !== "default" && + !Object.prototype.hasOwnProperty.call(exports, p) + ) + __createBinding(exports, m, p); + }; Object.defineProperty(exports, "__esModule", { value: true }); __exportStar(require("./mapping.db"), exports); -//# sourceMappingURL=index.js.map \ No newline at end of file +//# sourceMappingURL=index.js.map diff --git a/infrastructure/web3-adapter/src/db/mapping.db.js b/infrastructure/web3-adapter/src/db/mapping.db.js index 74e34eca..3866ae0a 100644 --- a/infrastructure/web3-adapter/src/db/mapping.db.js +++ b/infrastructure/web3-adapter/src/db/mapping.db.js @@ -1,7 +1,9 @@ "use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; +var __importDefault = + (this && this.__importDefault) || + function (mod) { + return mod && mod.__esModule ? mod : { default: mod }; + }; Object.defineProperty(exports, "__esModule", { value: true }); exports.MappingDatabase = void 0; const node_path_1 = require("node:path"); @@ -38,7 +40,9 @@ class MappingDatabase { async storeMapping(params) { // Validate inputs if (!params.localId || !params.globalId) { - throw new Error("Invalid mapping parameters: all fields are required"); + throw new Error( + "Invalid mapping parameters: all fields are required", + ); } console.log("storing mapping g:l", params.globalId, params.localId); // Check if mapping already exists @@ -46,8 +50,11 @@ class MappingDatabase { if (existingMapping) { return; } - await this.runAsync(`INSERT INTO id_mappings (local_id, global_id) - VALUES (?, ?)`, [params.localId, params.globalId]); + await this.runAsync( + `INSERT INTO id_mappings (local_id, global_id) + VALUES (?, ?)`, + [params.localId, params.globalId], + ); const storedMapping = await this.getGlobalId(params.localId); if (storedMapping !== params.globalId) { console.log("storedMappingError", storedMapping, params.globalId); @@ -63,12 +70,14 @@ class MappingDatabase { return null; } try { - const result = await this.getAsync(`SELECT global_id + const result = await this.getAsync( + `SELECT global_id FROM id_mappings - WHERE local_id = ?`, [localId]); + WHERE local_id = ?`, + [localId], + ); return result?.global_id ?? null; - } - catch (error) { + } catch (error) { console.error("Error getting global ID:", error); return null; } @@ -81,12 +90,14 @@ class MappingDatabase { return null; } try { - const result = await this.getAsync(`SELECT local_id + const result = await this.getAsync( + `SELECT local_id FROM id_mappings - WHERE global_id = ?`, [globalId]); + WHERE global_id = ?`, + [globalId], + ); return result?.local_id ?? 
null; - } - catch (error) { + } catch (error) { return null; } } @@ -97,8 +108,11 @@ class MappingDatabase { if (!localId) { return; } - await this.runAsync(`DELETE FROM id_mappings - WHERE local_id = ?`, [localId]); + await this.runAsync( + `DELETE FROM id_mappings + WHERE local_id = ?`, + [localId], + ); } /** * Get all mappings @@ -111,8 +125,7 @@ class MappingDatabase { localId: local_id, globalId: global_id, })); - } - catch (error) { + } catch (error) { return []; } } @@ -122,11 +135,10 @@ class MappingDatabase { close() { try { this.db.close(); - } - catch (error) { + } catch (error) { console.error("Error closing database connection:", error); } } } exports.MappingDatabase = MappingDatabase; -//# sourceMappingURL=mapping.db.js.map \ No newline at end of file +//# sourceMappingURL=mapping.db.js.map diff --git a/infrastructure/web3-adapter/src/evault/evault.js b/infrastructure/web3-adapter/src/evault/evault.js index 334b9825..034b30e4 100644 --- a/infrastructure/web3-adapter/src/evault/evault.js +++ b/infrastructure/web3-adapter/src/evault/evault.js @@ -68,8 +68,7 @@ class EVaultClient { * Cleanup method to properly dispose of resources */ dispose() { - if (this.isDisposed) - return; + if (this.isDisposed) return; this.isDisposed = true; this.clients.clear(); this.endpoints.clear(); @@ -85,19 +84,18 @@ class EVaultClient { for (let attempt = 0; attempt <= maxRetries; attempt++) { try { return await operation(); - } - catch (error) { + } catch (error) { lastError = error; // Don't retry on the last attempt - if (attempt === maxRetries) - break; + if (attempt === maxRetries) break; // Don't retry on certain errors if (error instanceof Error) { - const isRetryable = !(error.message.includes("401") || + const isRetryable = !( + error.message.includes("401") || error.message.includes("403") || - error.message.includes("404")); - if (!isRetryable) - break; + error.message.includes("404") + ); + if (!isRetryable) break; } // Exponential backoff const delay = CONFIG.RETRY_DELAY * 2 ** attempt; @@ -113,17 +111,23 @@ class EVaultClient { */ async requestPlatformToken() { try { - const response = await fetch(new URL("/platforms/certification", this.registryUrl).toString(), { - method: "POST", - headers: { - "Content-Type": "application/json", + const response = await fetch( + new URL( + "/platforms/certification", + this.registryUrl, + ).toString(), + { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ platform: this.platform }), }, - body: JSON.stringify({ platform: this.platform }), - }); + ); if (!response.ok) { throw new Error(`HTTP error! status: ${response.status}`); } - const data = (await response.json()); + const data = await response.json(); const now = Date.now(); const expiresAt = data.expiresAt || now + 3600000; // Default 1 hour return { @@ -131,8 +135,7 @@ class EVaultClient { expiresAt, obtainedAt: now, }; - } - catch (error) { + } catch (error) { console.error("Error requesting platform token:", error); throw new Error("Failed to request platform token"); } @@ -141,8 +144,7 @@ class EVaultClient { * Checks if token needs refresh */ isTokenExpired() { - if (!this.tokenInfo) - return true; + if (!this.tokenInfo) return true; const now = Date.now(); const timeUntilExpiry = this.tokenInfo.expiresAt - now; return timeUntilExpiry <= CONFIG.TOKEN_REFRESH_THRESHOLD; @@ -161,14 +163,18 @@ class EVaultClient { try { const enrichedW3id = w3id.startsWith("@") ? 
w3id : `@${w3id}`; console.log("fetching endpoint for :", enrichedW3id); - const response = await fetch(new URL(`/resolve?w3id=${enrichedW3id}`, this.registryUrl).toString()); + const response = await fetch( + new URL( + `/resolve?w3id=${enrichedW3id}`, + this.registryUrl, + ).toString(), + ); if (!response.ok) { throw new Error(`HTTP error! status: ${response.status}`); } const data = await response.json(); return new URL("/graphql", data.uri).toString(); - } - catch (error) { + } catch (error) { console.error("Error resolving eVault endpoint:", error); throw new Error("Failed to resolve eVault endpoint"); } @@ -183,18 +189,24 @@ class EVaultClient { const endpoint = this.endpoints.get(w3id); // Check if the cached endpoint is still healthy if (await this.isEndpointHealthy(w3id, endpoint)) { - console.log("reusing existing client for w3id:", w3id, "endpoint:", endpoint); + console.log( + "reusing existing client for w3id:", + w3id, + "endpoint:", + endpoint, + ); return client; - } - else { - console.log("cached endpoint is unhealthy, removing and re-resolving for w3id:", w3id); + } else { + console.log( + "cached endpoint is unhealthy, removing and re-resolving for w3id:", + w3id, + ); this.removeCachedClient(w3id); } } // Resolve endpoint for this specific w3id const endpoint = await this.resolveEndpoint(w3id).catch(() => null); - if (!endpoint) - throw new Error("Failed to resolve endpoint"); + if (!endpoint) throw new Error("Failed to resolve endpoint"); // Get platform token and create client with authorization header const token = await this.ensurePlatformToken(); const client = new graphql_request_1.GraphQLClient(endpoint, { @@ -208,7 +220,12 @@ class EVaultClient { // Initialize health check tracking this.healthCheckFailures.set(w3id, 0); this.lastHealthCheck.set(w3id, Date.now()); - console.log("created new client for w3id:", w3id, "endpoint:", endpoint); + console.log( + "created new client for w3id:", + w3id, + "endpoint:", + endpoint, + ); return client; } /** @@ -228,9 +245,14 @@ class EVaultClient { } // Perform health check on the whois endpoint const healthCheckUrl = `${baseUrl}/whois`; - console.log(`Health checking endpoint for ${w3id}: ${healthCheckUrl}`); + console.log( + `Health checking endpoint for ${w3id}: ${healthCheckUrl}`, + ); const controller = new AbortController(); - const timeoutId = setTimeout(() => controller.abort(), CONFIG.HEALTH_CHECK_TIMEOUT); + const timeoutId = setTimeout( + () => controller.abort(), + CONFIG.HEALTH_CHECK_TIMEOUT, + ); const response = await fetch(healthCheckUrl, { method: "HEAD", signal: controller.signal, @@ -241,13 +263,16 @@ class EVaultClient { this.healthCheckFailures.set(w3id, 0); this.lastHealthCheck.set(w3id, now); return true; + } else { + throw new Error( + `Health check failed with status: ${response.status}`, + ); } - else { - throw new Error(`Health check failed with status: ${response.status}`); - } - } - catch (error) { - console.log(`Health check failed for ${w3id}:`, error instanceof Error ? error.message : "Unknown error"); + } catch (error) { + console.log( + `Health check failed for ${w3id}:`, + error instanceof Error ? 
error.message : "Unknown error", + ); // Increment failure count const currentFailures = this.healthCheckFailures.get(w3id) || 0; const newFailures = currentFailures + 1; @@ -255,7 +280,9 @@ class EVaultClient { this.lastHealthCheck.set(w3id, Date.now()); // If we've had too many consecutive failures, mark as unhealthy if (newFailures >= CONFIG.MAX_HEALTH_CHECK_FAILURES) { - console.log(`Endpoint for ${w3id} marked as unhealthy after ${newFailures} consecutive failures`); + console.log( + `Endpoint for ${w3id} marked as unhealthy after ${newFailures} consecutive failures`, + ); return false; } // Still allow some failures before marking as unhealthy @@ -279,18 +306,21 @@ class EVaultClient { const controller = new AbortController(); const timeoutId = setTimeout(() => { controller.abort(); - console.log(`GraphQL request timeout for ${w3id}, marking endpoint as unhealthy`); + console.log( + `GraphQL request timeout for ${w3id}, marking endpoint as unhealthy`, + ); this.removeCachedClient(w3id); }, CONFIG.GRAPHQL_TIMEOUT); try { const result = await operation(); clearTimeout(timeoutId); return result; - } - catch (error) { + } catch (error) { clearTimeout(timeoutId); if (error instanceof Error && error.name === "AbortError") { - throw new Error(`Request timeout after ${CONFIG.GRAPHQL_TIMEOUT}ms`); + throw new Error( + `Request timeout after ${CONFIG.GRAPHQL_TIMEOUT}ms`, + ); } throw error; } @@ -313,7 +343,9 @@ class EVaultClient { this.lastHealthCheck.set(w3id, 0); const isHealthy = await this.isEndpointHealthy(w3id, endpoint); if (!isHealthy) { - console.log(`Forced health check failed for ${w3id}, removing cached client`); + console.log( + `Forced health check failed for ${w3id}, removing cached client`, + ); this.removeCachedClient(w3id); } return isHealthy; @@ -351,19 +383,19 @@ class EVaultClient { const client = await this.ensureClient(envelope.w3id).catch(() => { return null; }); - if (!client) - return (0, uuid_1.v4)(); + if (!client) return (0, uuid_1.v4)(); console.log("sending to eVault: ", envelope.w3id); console.log("sending payload", envelope); - const response = await this.withTimeout(envelope.w3id, () => client.request(STORE_META_ENVELOPE, { - input: { - ontology: envelope.schemaId, - payload: envelope.data, - acl: ["*"], - }, - })).catch(() => null); - if (!response) - return (0, uuid_1.v4)(); + const response = await this.withTimeout(envelope.w3id, () => + client.request(STORE_META_ENVELOPE, { + input: { + ontology: envelope.schemaId, + payload: envelope.data, + acl: ["*"], + }, + }), + ).catch(() => null); + if (!response) return (0, uuid_1.v4)(); return response.storeMetaEnvelope.metaEnvelope.id; }); } @@ -372,14 +404,14 @@ class EVaultClient { const client = await this.ensureClient(w3id); const response = await client .request(STORE_META_ENVELOPE, { - input: { - ontology: "reference", - payload: { - _by_reference: referenceId, + input: { + ontology: "reference", + payload: { + _by_reference: referenceId, + }, + acl: ["*"], }, - acl: ["*"], - }, - }) + }) .catch(() => null); if (!response) { console.error("Failed to store reference"); @@ -396,8 +428,7 @@ class EVaultClient { w3id, }); return response.metaEnvelope; - } - catch (error) { + } catch (error) { console.error("Error fetching meta envelope:", error); throw error; } @@ -406,7 +437,9 @@ class EVaultClient { async updateMetaEnvelopeById(id, envelope) { return this.withRetry(async () => { console.log("sending to eVault", envelope.w3id); - const client = await this.ensureClient(envelope.w3id).catch(() => null); + 
const client = await this.ensureClient(envelope.w3id).catch( + () => null, + ); if (!client) throw new Error("Failed to establish client connection"); try { @@ -418,9 +451,11 @@ class EVaultClient { acl: ["*"], }, }; - const response = await client.request(UPDATE_META_ENVELOPE, variables); - } - catch (error) { + const response = await client.request( + UPDATE_META_ENVELOPE, + variables, + ); + } catch (error) { console.error("Error updating meta envelope:", error); throw error; } @@ -428,4 +463,4 @@ class EVaultClient { } } exports.EVaultClient = EVaultClient; -//# sourceMappingURL=evault.js.map \ No newline at end of file +//# sourceMappingURL=evault.js.map diff --git a/infrastructure/web3-adapter/src/index.js b/infrastructure/web3-adapter/src/index.js index 313a71d0..bc0148a7 100644 --- a/infrastructure/web3-adapter/src/index.js +++ b/infrastructure/web3-adapter/src/index.js @@ -1,40 +1,71 @@ "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || (function () { - var ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function (o) { - var ar = []; - for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; - return ar; +var __createBinding = + (this && this.__createBinding) || + (Object.create + ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if ( + !desc || + ("get" in desc + ? !m.__esModule + : desc.writable || desc.configurable) + ) { + desc = { + enumerable: true, + get: function () { + return m[k]; + }, + }; + } + Object.defineProperty(o, k2, desc); + } + : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); +var __setModuleDefault = + (this && this.__setModuleDefault) || + (Object.create + ? 
function (o, v) { + Object.defineProperty(o, "default", { + enumerable: true, + value: v, + }); + } + : function (o, v) { + o["default"] = v; + }); +var __importStar = + (this && this.__importStar) || + (function () { + var ownKeys = function (o) { + ownKeys = + Object.getOwnPropertyNames || + function (o) { + var ar = []; + for (var k in o) + if (Object.prototype.hasOwnProperty.call(o, k)) + ar[ar.length] = k; + return ar; + }; + return ownKeys(o); }; - return ownKeys(o); - }; - return function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - __setModuleDefault(result, mod); - return result; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) + for (var k = ownKeys(mod), i = 0; i < k.length; i++) + if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; + })(); +var __importDefault = + (this && this.__importDefault) || + function (mod) { + return mod && mod.__esModule ? mod : { default: mod }; }; -})(); -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; Object.defineProperty(exports, "__esModule", { value: true }); exports.Web3Adapter = void 0; exports.spinUpEVault = spinUpEVault; @@ -58,28 +89,38 @@ async function spinUpEVault(registryUrl, provisionerUrl, verificationCode) { const DEMO_CODE_W3DS = "d66b7138-538a-465f-a6ce-f6985854c3f4"; const finalVerificationCode = verificationCode || DEMO_CODE_W3DS; try { - const entropyResponse = await axios_1.default.get(new URL("/entropy", registryUrl).toString()); + const entropyResponse = await axios_1.default.get( + new URL("/entropy", registryUrl).toString(), + ); const registryEntropy = entropyResponse.data.token; const namespace = (0, uuid_1.v4)(); - const provisionResponse = await axios_1.default.post(new URL("/provision", provisionerUrl).toString(), { - registryEntropy, - namespace, - verificationId: finalVerificationCode, - publicKey: "0x0000000000000000000000000000000000000000", - }); + const provisionResponse = await axios_1.default.post( + new URL("/provision", provisionerUrl).toString(), + { + registryEntropy, + namespace, + verificationId: finalVerificationCode, + publicKey: "0x0000000000000000000000000000000000000000", + }, + ); if (!provisionResponse.data.success) { - throw new Error(`Failed to provision eVault: ${provisionResponse.data.message || "Unknown error"}`); + throw new Error( + `Failed to provision eVault: ${provisionResponse.data.message || "Unknown error"}`, + ); } return { w3id: provisionResponse.data.w3id, uri: provisionResponse.data.uri, }; - } - catch (error) { + } catch (error) { if (axios_1.default.isAxiosError(error)) { - throw new Error(`Failed to spin up eVault: ${error.response?.data?.message || error.message}`); + throw new Error( + `Failed to spin up eVault: ${error.response?.data?.message || error.message}`, + ); } - throw new Error(`Failed to spin up eVault: ${error instanceof Error ? error.message : "Unknown error"}`); + throw new Error( + `Failed to spin up eVault: ${error instanceof Error ? 
error.message : "Unknown error"}`, + ); } } /** @@ -90,31 +131,52 @@ async function spinUpEVault(registryUrl, provisionerUrl, verificationCode) { * @param verificationCode - Optional verification code, defaults to demo code * @returns Promise with eVault details (w3id, uri, manifestId) */ -async function createGroupEVault(registryUrl, provisionerUrl, groupData, verificationCode) { +async function createGroupEVault( + registryUrl, + provisionerUrl, + groupData, + verificationCode, +) { const DEMO_CODE_W3DS = "d66b7138-538a-465f-a6ce-f6985854c3f4"; const finalVerificationCode = verificationCode || DEMO_CODE_W3DS; try { // Step 1: Spin up the eVault - const evault = await spinUpEVault(registryUrl, provisionerUrl, finalVerificationCode); + const evault = await spinUpEVault( + registryUrl, + provisionerUrl, + finalVerificationCode, + ); // Step 2: Create GroupManifest with exponential backoff - const manifestId = await createGroupManifestWithRetry(registryUrl, evault.w3id, groupData); + const manifestId = await createGroupManifestWithRetry( + registryUrl, + evault.w3id, + groupData, + ); return { w3id: evault.w3id, uri: evault.uri, manifestId, }; - } - catch (error) { + } catch (error) { if (axios_1.default.isAxiosError(error)) { - throw new Error(`Failed to create group eVault: ${error.response?.data?.message || error.message}`); + throw new Error( + `Failed to create group eVault: ${error.response?.data?.message || error.message}`, + ); } - throw new Error(`Failed to create group eVault: ${error instanceof Error ? error.message : "Unknown error"}`); + throw new Error( + `Failed to create group eVault: ${error instanceof Error ? error.message : "Unknown error"}`, + ); } } /** * Create GroupManifest in eVault with exponential backoff retry mechanism */ -async function createGroupManifestWithRetry(registryUrl, w3id, groupData, maxRetries = 10) { +async function createGroupManifestWithRetry( + registryUrl, + w3id, + groupData, + maxRetries = 10, +) { const now = new Date().toISOString(); const groupManifest = { eName: w3id, @@ -130,10 +192,16 @@ async function createGroupManifestWithRetry(registryUrl, w3id, groupData, maxRet }; for (let attempt = 1; attempt <= maxRetries; attempt++) { try { - console.log(`Attempting to create GroupManifest in eVault (attempt ${attempt}/${maxRetries})`); - const response = await axios_1.default.get(new URL(`resolve?w3id=${w3id}`, registryUrl).toString()); + console.log( + `Attempting to create GroupManifest in eVault (attempt ${attempt}/${maxRetries})`, + ); + const response = await axios_1.default.get( + new URL(`resolve?w3id=${w3id}`, registryUrl).toString(), + ); const endpoint = new URL("/graphql", response.data.uri).toString(); - const { GraphQLClient } = await Promise.resolve().then(() => __importStar(require("graphql-request"))); + const { GraphQLClient } = await Promise.resolve().then(() => + __importStar(require("graphql-request")), + ); const client = new GraphQLClient(endpoint); const STORE_META_ENVELOPE = ` mutation StoreMetaEnvelope($input: MetaEnvelopeInput!) 
{ @@ -154,13 +222,20 @@ async function createGroupManifestWithRetry(registryUrl, w3id, groupData, maxRet }, }); const manifestId = result.storeMetaEnvelope.metaEnvelope.id; - console.log("GroupManifest created successfully in eVault:", manifestId); + console.log( + "GroupManifest created successfully in eVault:", + manifestId, + ); return manifestId; - } - catch (error) { - console.error(`Failed to create GroupManifest in eVault (attempt ${attempt}/${maxRetries}):`, error); + } catch (error) { + console.error( + `Failed to create GroupManifest in eVault (attempt ${attempt}/${maxRetries}):`, + error, + ); if (attempt === maxRetries) { - console.error("Max retries reached, giving up on GroupManifest creation"); + console.error( + "Max retries reached, giving up on GroupManifest creation", + ); throw error; } // Wait before retrying (exponential backoff) @@ -178,14 +253,19 @@ class Web3Adapter { this.lockedIds = []; this.readPaths(); this.mappingDb = new db_1.MappingDatabase(config.dbPath); - this.evaultClient = new evault_1.EVaultClient(config.registryUrl, config.platform); + this.evaultClient = new evault_1.EVaultClient( + config.registryUrl, + config.platform, + ); this.platform = config.platform; } async readPaths() { const allRawFiles = await fs.readdir(this.config.schemasPath); const mappingFiles = allRawFiles.filter((p) => p.endsWith(".json")); for (const mappingFile of mappingFiles) { - const mappingFileContent = await fs.readFile(node_path_1.default.join(this.config.schemasPath, mappingFile)); + const mappingFileContent = await fs.readFile( + node_path_1.default.join(this.config.schemasPath, mappingFile), + ); const mappingParsed = JSON.parse(mappingFileContent.toString()); this.mapping[mappingParsed.tableName] = mappingParsed; } @@ -200,16 +280,14 @@ class Web3Adapter { async handleChange(props) { const { data, tableName, participants } = props; const existingGlobalId = await this.mappingDb.getGlobalId(data.id); - if (!this.mapping[tableName]) - return; + if (!this.mapping[tableName]) return; if (this.mapping[tableName].readOnly) { // early return on mappings which are readonly so as to not // sync any update to the eVault which is not warranted return; } if (existingGlobalId) { - if (this.lockedIds.includes(existingGlobalId)) - return; + if (this.lockedIds.includes(existingGlobalId)) return; const global = await (0, mapper_1.toGlobal)({ data, mapping: this.mapping[tableName], @@ -217,11 +295,11 @@ class Web3Adapter { }); this.evaultClient .updateMetaEnvelopeById(existingGlobalId, { - id: existingGlobalId, - w3id: global.ownerEvault, - data: global.data, - schemaId: this.mapping[tableName].schemaId, - }) + id: existingGlobalId, + w3id: global.ownerEvault, + data: global.data, + schemaId: this.mapping[tableName].schemaId, + }) .catch(() => console.error("failed to sync update")); logging_1.logger.info({ tableName, @@ -249,8 +327,7 @@ class Web3Adapter { schemaId: this.mapping[tableName].schemaId, }); console.log("created new meta-env", globalId); - } - else { + } else { return; } // Store the mapping @@ -259,9 +336,14 @@ class Web3Adapter { globalId, }); // Handle references for other participants - const otherEvaults = (participants ?? []).filter((i) => i !== global.ownerEvault); + const otherEvaults = (participants ?? 
[]).filter( + (i) => i !== global.ownerEvault, + ); for (const evault of otherEvaults) { - await this.evaultClient.storeReference(`${global.ownerEvault}/${globalId}`, evault); + await this.evaultClient.storeReference( + `${global.ownerEvault}/${globalId}`, + evault, + ); } logging_1.logger.info({ tableName, @@ -292,11 +374,18 @@ class Web3Adapter { * @returns Promise with eVault details (w3id, uri) */ async spinUpEVault(verificationCode, provisionerUrl) { - const finalProvisionerUrl = provisionerUrl || this.config.provisionerUrl; + const finalProvisionerUrl = + provisionerUrl || this.config.provisionerUrl; if (!finalProvisionerUrl) { - throw new Error("Provisioner URL is required. Please provide it in config or as parameter."); + throw new Error( + "Provisioner URL is required. Please provide it in config or as parameter.", + ); } - return spinUpEVault(this.config.registryUrl, finalProvisionerUrl, verificationCode); + return spinUpEVault( + this.config.registryUrl, + finalProvisionerUrl, + verificationCode, + ); } /** * Creates a group eVault with GroupManifest @@ -306,12 +395,20 @@ class Web3Adapter { * @returns Promise with eVault details (w3id, uri, manifestId) */ async createGroupEVault(groupData, verificationCode, provisionerUrl) { - const finalProvisionerUrl = provisionerUrl || this.config.provisionerUrl; + const finalProvisionerUrl = + provisionerUrl || this.config.provisionerUrl; if (!finalProvisionerUrl) { - throw new Error("Provisioner URL is required. Please provide it in config or as parameter."); + throw new Error( + "Provisioner URL is required. Please provide it in config or as parameter.", + ); } - return createGroupEVault(this.config.registryUrl, finalProvisionerUrl, groupData, verificationCode); + return createGroupEVault( + this.config.registryUrl, + finalProvisionerUrl, + groupData, + verificationCode, + ); } } exports.Web3Adapter = Web3Adapter; -//# sourceMappingURL=index.js.map \ No newline at end of file +//# sourceMappingURL=index.js.map diff --git a/infrastructure/web3-adapter/src/logging/index.js b/infrastructure/web3-adapter/src/logging/index.js index 06388ce6..d8aecc9b 100644 --- a/infrastructure/web3-adapter/src/logging/index.js +++ b/infrastructure/web3-adapter/src/logging/index.js @@ -1,19 +1,40 @@ "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; +var __createBinding = + (this && this.__createBinding) || + (Object.create + ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if ( + !desc || + ("get" in desc + ? 
!m.__esModule + : desc.writable || desc.configurable) + ) { + desc = { + enumerable: true, + get: function () { + return m[k]; + }, + }; + } + Object.defineProperty(o, k2, desc); + } + : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); +var __exportStar = + (this && this.__exportStar) || + function (m, exports) { + for (var p in m) + if ( + p !== "default" && + !Object.prototype.hasOwnProperty.call(exports, p) + ) + __createBinding(exports, m, p); + }; Object.defineProperty(exports, "__esModule", { value: true }); __exportStar(require("./transport"), exports); __exportStar(require("./logger"), exports); -//# sourceMappingURL=index.js.map \ No newline at end of file +//# sourceMappingURL=index.js.map diff --git a/infrastructure/web3-adapter/src/logging/logger.js b/infrastructure/web3-adapter/src/logging/logger.js index 71ce7443..7f90651b 100644 --- a/infrastructure/web3-adapter/src/logging/logger.js +++ b/infrastructure/web3-adapter/src/logging/logger.js @@ -1,10 +1,12 @@ "use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +var __importDefault = + (this && this.__importDefault) || + function (mod) { + return mod && mod.__esModule ? mod : { default: mod }; + }; Object.defineProperty(exports, "__esModule", { value: true }); exports.logger = void 0; const pino_1 = __importDefault(require("pino")); const transport_1 = require("./transport"); exports.logger = (0, pino_1.default)(transport_1.transport); -//# sourceMappingURL=logger.js.map \ No newline at end of file +//# sourceMappingURL=logger.js.map diff --git a/infrastructure/web3-adapter/src/logging/transport.js b/infrastructure/web3-adapter/src/logging/transport.js index f7d515ac..3ec044a1 100644 --- a/infrastructure/web3-adapter/src/logging/transport.js +++ b/infrastructure/web3-adapter/src/logging/transport.js @@ -1,7 +1,9 @@ "use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; +var __importDefault = + (this && this.__importDefault) || + function (mod) { + return mod && mod.__esModule ? 
mod : { default: mod }; + }; Object.defineProperty(exports, "__esModule", { value: true }); exports.transport = void 0; const pino_1 = require("pino"); @@ -22,4 +24,4 @@ exports.transport = (0, pino_1.transport)({ }, }, }); -//# sourceMappingURL=transport.js.map \ No newline at end of file +//# sourceMappingURL=transport.js.map diff --git a/infrastructure/web3-adapter/src/mapper/mapper.js b/infrastructure/web3-adapter/src/mapper/mapper.js index 5f6af11e..59592cbe 100644 --- a/infrastructure/web3-adapter/src/mapper/mapper.js +++ b/infrastructure/web3-adapter/src/mapper/mapper.js @@ -14,7 +14,9 @@ function getValueByPath(obj, path) { } // If there's a field path after [], map through the array if (fieldPath) { - return array.map((item) => getValueByPath(item, fieldPath.slice(1))); // Remove the leading dot + return array.map((item) => + getValueByPath(item, fieldPath.slice(1)), + ); // Remove the leading dot } return array; } @@ -22,8 +24,7 @@ function getValueByPath(obj, path) { const parts = path.split("."); // biome-ignore lint/suspicious/noExplicitAny: return parts.reduce((acc, part) => { - if (acc === null || acc === undefined) - return undefined; + if (acc === null || acc === undefined) return undefined; return acc[part]; }, obj); } @@ -50,20 +51,28 @@ async function extractOwnerEvault(data, ownerEnamePath) { .map((path) => path.trim()) .filter((path) => path.length > 0); if (paths.length < 2) { - console.warn("Invalid fallback path format. Expected 'path1||path2' but got:", ownerEnamePath); + console.warn( + "Invalid fallback path format. Expected 'path1||path2' but got:", + ownerEnamePath, + ); return null; } - console.log(`Processing fallback paths for owner eVault: [${paths.join(", ")}]`); + console.log( + `Processing fallback paths for owner eVault: [${paths.join(", ")}]`, + ); // Try each path in order until one succeeds for (let i = 0; i < paths.length; i++) { const path = paths[i]; - console.log(`Trying fallback path ${i + 1}/${paths.length}: ${path}`); + console.log( + `Trying fallback path ${i + 1}/${paths.length}: ${path}`, + ); const result = await extractOwnerEvaultSinglePath(data, path); if (result !== null) { - console.log(`✅ Owner eVault found using fallback path ${i + 1}: ${path}`); + console.log( + `✅ Owner eVault found using fallback path ${i + 1}: ${path}`, + ); return result; - } - else { + } else { console.log(`❌ Fallback path ${i + 1} failed: ${path}`); } } @@ -85,20 +94,23 @@ async function extractOwnerEvaultSinglePath(data, ownerEnamePath) { const [_, fieldPathRaw] = ownerEnamePath.split("("); const fieldPath = fieldPathRaw.replace(")", ""); let value = getValueByPath(data, fieldPath); - if (Array.isArray(value)) - return value[0]; + if (Array.isArray(value)) return value[0]; console.log("OWNER PATH", value); // Check if value is a string before calling .includes() - if (typeof value === "string" && + if ( + typeof value === "string" && value.includes("(") && - value.includes(")")) { + value.includes(")") + ) { value = value.split("(")[1].split(")")[0]; } return value || null; } -async function fromGlobal({ data, mapping, mappingStore, }) { +async function fromGlobal({ data, mapping, mappingStore }) { const result = {}; - for (const [localKey, globalPathRaw] of Object.entries(mapping.localToUniversalMap)) { + for (const [localKey, globalPathRaw] of Object.entries( + mapping.localToUniversalMap, + )) { let value; const targetKey = localKey; let tableRef = null; @@ -108,44 +120,43 @@ async function fromGlobal({ data, mapping, mappingStore, }) { if (outerFn === 
"date") { const calcMatch = innerExpr.match(/^calc\((.+)\)$/); if (calcMatch) { - const calcResult = evaluateCalcExpression(calcMatch[1], data); + const calcResult = evaluateCalcExpression( + calcMatch[1], + data, + ); value = calcResult !== undefined ? new Date(calcResult).toISOString() : undefined; - } - else { + } else { const rawVal = getValueByPath(data, innerExpr); if (typeof rawVal === "number") { value = new Date(rawVal).toISOString(); - } - else if (rawVal?._seconds) { + } else if (rawVal?._seconds) { // Handle Firebase v8 timestamp format value = new Date(rawVal._seconds * 1000).toISOString(); - } - else if (rawVal?.seconds) { + } else if (rawVal?.seconds) { // Handle Firebase v9+ timestamp format value = new Date(rawVal.seconds * 1000).toISOString(); - } - else if (rawVal?.toDate && - typeof rawVal.toDate === "function") { + } else if ( + rawVal?.toDate && + typeof rawVal.toDate === "function" + ) { // Handle Firebase Timestamp objects value = rawVal.toDate().toISOString(); - } - else if (rawVal instanceof Date) { + } else if (rawVal instanceof Date) { value = rawVal.toISOString(); - } - else if (typeof rawVal === "string" && - rawVal.includes("UTC")) { + } else if ( + typeof rawVal === "string" && + rawVal.includes("UTC") + ) { // Handle Firebase timestamp strings like "August 18, 2025 at 10:03:19 AM UTC+5:30" value = new Date(rawVal).toISOString(); - } - else { + } else { value = undefined; } } - } - else if (outerFn === "calc") { + } else if (outerFn === "calc") { value = evaluateCalcExpression(innerExpr, data); } result[targetKey] = value; @@ -161,12 +172,13 @@ async function fromGlobal({ data, mapping, mappingStore, }) { value = getValueByPath(data, pathRef); if (tableRef) { if (Array.isArray(value)) { - value = await Promise.all(value.map(async (v) => { - const localId = await mappingStore.getLocalId(v); - return localId ? `${tableRef}(${localId})` : null; - })); - } - else { + value = await Promise.all( + value.map(async (v) => { + const localId = await mappingStore.getLocalId(v); + return localId ? `${tableRef}(${localId})` : null; + }), + ); + } else { value = await mappingStore.getLocalId(value); value = value ? 
`${tableRef}(${value})` : null; } @@ -177,9 +189,11 @@ async function fromGlobal({ data, mapping, mappingStore, }) { data: result, }; } -function evaluateCalcExpression(expr, -// biome-ignore lint/suspicious/noExplicitAny: -context) { +function evaluateCalcExpression( + expr, + // biome-ignore lint/suspicious/noExplicitAny: + context, +) { const tokens = expr .split(/[^\w.]+/) .map((t) => t.trim()) @@ -188,19 +202,23 @@ context) { for (const token of tokens) { const value = getValueByPath(context, token); if (typeof value !== "undefined") { - resolvedExpr = resolvedExpr.replace(new RegExp(`\\b${token.replace(".", "\\.")}\\b`, "g"), value); + resolvedExpr = resolvedExpr.replace( + new RegExp(`\\b${token.replace(".", "\\.")}\\b`, "g"), + value, + ); } } try { return Function(`use strict"; return (${resolvedExpr})`)(); - } - catch { + } catch { return undefined; } } -async function toGlobal({ data, mapping, mappingStore, }) { +async function toGlobal({ data, mapping, mappingStore }) { const result = {}; - for (const [localKey, globalPathRaw] of Object.entries(mapping.localToUniversalMap)) { + for (const [localKey, globalPathRaw] of Object.entries( + mapping.localToUniversalMap, + )) { // biome-ignore lint/suspicious/noExplicitAny: let value; let targetKey = globalPathRaw; @@ -226,44 +244,43 @@ async function toGlobal({ data, mapping, mappingStore, }) { if (outerFn === "date") { const calcMatch = innerExpr.match(/^calc\((.+)\)$/); if (calcMatch) { - const calcResult = evaluateCalcExpression(calcMatch[1], data); + const calcResult = evaluateCalcExpression( + calcMatch[1], + data, + ); value = calcResult !== undefined ? new Date(calcResult).toISOString() : undefined; - } - else { + } else { const rawVal = getValueByPath(data, innerExpr); if (typeof rawVal === "number") { value = new Date(rawVal).toISOString(); - } - else if (rawVal?._seconds) { + } else if (rawVal?._seconds) { // Handle Firebase v8 timestamp format value = new Date(rawVal._seconds * 1000).toISOString(); - } - else if (rawVal?.seconds) { + } else if (rawVal?.seconds) { // Handle Firebase v9+ timestamp format value = new Date(rawVal.seconds * 1000).toISOString(); - } - else if (rawVal?.toDate && - typeof rawVal.toDate === "function") { + } else if ( + rawVal?.toDate && + typeof rawVal.toDate === "function" + ) { // Handle Firebase Timestamp objects value = rawVal.toDate().toISOString(); - } - else if (rawVal instanceof Date) { + } else if (rawVal instanceof Date) { value = rawVal.toISOString(); - } - else if (typeof rawVal === "string" && - rawVal.includes("UTC")) { + } else if ( + typeof rawVal === "string" && + rawVal.includes("UTC") + ) { // Handle Firebase timestamp strings like "August 18, 2025 at 10:03:19 AM UTC+5:30" value = new Date(rawVal).toISOString(); - } - else { + } else { value = undefined; } } - } - else if (outerFn === "calc") { + } else if (outerFn === "calc") { value = evaluateCalcExpression(innerExpr, data); } result[targetKey] = value; @@ -277,16 +294,13 @@ async function toGlobal({ data, mapping, mappingStore, }) { value = Array.isArray(refValue) ? refValue.map((v) => `@${v}`) : []; - } - else { + } else { value = refValue ? `@${refValue}` : undefined; } result[targetKey] = value; continue; } - let pathRef = globalPathRaw.includes(",") - ? globalPathRaw - : localKey; + let pathRef = globalPathRaw.includes(",") ? 
globalPathRaw : localKey; let tableRef = null; if (globalPathRaw.includes("(") && globalPathRaw.includes(")")) { pathRef = globalPathRaw.split("(")[1].split(")")[0]; @@ -298,9 +312,13 @@ async function toGlobal({ data, mapping, mappingStore, }) { value = getValueByPath(data, pathRef); if (tableRef) { if (Array.isArray(value)) { - value = await Promise.all(value.map(async (v) => (await mappingStore.getGlobalId(v)) ?? undefined)); - } - else { + value = await Promise.all( + value.map( + async (v) => + (await mappingStore.getGlobalId(v)) ?? undefined, + ), + ); + } else { value = (await mappingStore.getGlobalId(value)) ?? undefined; } } @@ -312,4 +330,4 @@ async function toGlobal({ data, mapping, mappingStore, }) { data: result, }; } -//# sourceMappingURL=mapper.js.map \ No newline at end of file +//# sourceMappingURL=mapper.js.map diff --git a/infrastructure/web3-adapter/src/mapper/mapper.types.js b/infrastructure/web3-adapter/src/mapper/mapper.types.js index ea11c714..5e6cd2c9 100644 --- a/infrastructure/web3-adapter/src/mapper/mapper.types.js +++ b/infrastructure/web3-adapter/src/mapper/mapper.types.js @@ -1,3 +1,3 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=mapper.types.js.map \ No newline at end of file +//# sourceMappingURL=mapper.types.js.map diff --git a/package.json b/package.json index 7d824959..4ea8df2d 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,9 @@ "check-format": "turbo run check-format", "check": "turbo run check", "check-types": "turbo run check-types", - "dev:evault": "docker compose -f evault.docker-compose.yml up --watch" + "dev:docker": "docker compose -f dev-docker-compose.yaml up --watch", + "dev:docker:down": "docker compose -f dev-docker-compose.yaml down", + "dev:docker:all": "docker compose -f dev-docker-compose.yaml --profile all up --watch" }, "devDependencies": { "@biomejs/biome": "^1.9.4", @@ -45,4 +47,4 @@ "@types/react-dom": "18.3.1" } } -} +} \ No newline at end of file diff --git a/platforms/blabsy/next.config.ts b/platforms/blabsy/next.config.ts index 355d9da8..bfda7140 100644 --- a/platforms/blabsy/next.config.ts +++ b/platforms/blabsy/next.config.ts @@ -1,5 +1,5 @@ -import type { NextConfig } from "next"; -import path from "path"; +import type { NextConfig } from 'next'; +import path from 'path'; const nextConfig: NextConfig = { reactStrictMode: true, @@ -11,7 +11,7 @@ const nextConfig: NextConfig = { }, output: 'standalone', distDir: '.next', - outputFileTracingRoot: path.join(__dirname), + outputFileTracingRoot: path.join(__dirname) }; export default nextConfig; diff --git a/platforms/blabsy/src/components/chat/chat-window.tsx b/platforms/blabsy/src/components/chat/chat-window.tsx index ba51d750..140cabee 100644 --- a/platforms/blabsy/src/components/chat/chat-window.tsx +++ b/platforms/blabsy/src/components/chat/chat-window.tsx @@ -70,12 +70,14 @@ function MessageItem({ return (
{/* User Avatar and Name - Above the message */} {!isOwnMessage && showUserInfo && ( @@ -105,10 +107,11 @@ function MessageItem({ {/* Message Bubble */}
{showTime && message.createdAt?.toDate && (
{formatDistanceToNow(message.createdAt.toDate(), { addSuffix: true @@ -298,8 +302,8 @@ export function ChatWindow(): JSX.Element {
{getChatType(currentChat) === 'direct' ? otherUser?.name || - otherUser?.username || - otherParticipant + otherUser?.username || + otherParticipant : currentChat.name}
@@ -355,7 +359,7 @@ export function ChatWindow(): JSX.Element { const showTime = !nextMessage || nextMessage.senderId !== - message.senderId; + message.senderId; // Show user info if: // 1. It's a group chat AND @@ -363,11 +367,11 @@ export function ChatWindow(): JSX.Element { // 3. Previous message is from same sender but more than 5 minutes ago const showUserInfo = getChatType(currentChat) === - 'group' && + 'group' && !isOwnMessage && (!prevMessage || prevMessage.senderId !== - message.senderId || + message.senderId || (prevMessage.createdAt ?.toDate && message.createdAt?.toDate && @@ -375,11 +379,11 @@ export function ChatWindow(): JSX.Element { prevMessage.createdAt .toDate() .getTime() - - message.createdAt - .toDate() - .getTime() + message.createdAt + .toDate() + .getTime() ) > - 5 * 60 * 1000)); + 5 * 60 * 1000)); const userData = message.senderId ? participantsData[message.senderId] diff --git a/platforms/blabsy/src/components/common/maintenance-banner.tsx b/platforms/blabsy/src/components/common/maintenance-banner.tsx index 3f4b364d..7ad748e3 100644 --- a/platforms/blabsy/src/components/common/maintenance-banner.tsx +++ b/platforms/blabsy/src/components/common/maintenance-banner.tsx @@ -15,10 +15,12 @@ export function MaintenanceBanner(): JSX.Element | null { useEffect(() => { const fetchMotd = async () => { try { - const registryUrl = process.env.NEXT_PUBLIC_REGISTRY_URL || 'http://localhost:4321'; + const registryUrl = + process.env.NEXT_PUBLIC_REGISTRY_URL || + 'http://localhost:4321'; const response = await axios.get(`${registryUrl}/motd`); setMotd(response.data); - + // Check if this message has been dismissed if (response.data.status === 'maintenance') { const dismissed = localStorage.getItem(DISMISSED_KEY); @@ -69,4 +71,3 @@ export function MaintenanceBanner(): JSX.Element | null {
); } - diff --git a/platforms/blabsy/src/components/login/login-main.tsx b/platforms/blabsy/src/components/login/login-main.tsx index c1170cce..c1880cf7 100644 --- a/platforms/blabsy/src/components/login/login-main.tsx +++ b/platforms/blabsy/src/components/login/login-main.tsx @@ -45,7 +45,8 @@ export function LoginMain(): JSX.Element { if (typeof window === 'undefined' || typeof navigator === 'undefined') { return 'https://play.google.com/store/apps/details?id=foundation.metastate.eid_wallet'; } - const userAgent = navigator.userAgent || navigator.vendor || (window as any).opera; + const userAgent = + navigator.userAgent || navigator.vendor || (window as any).opera; if (/android/i.test(userAgent)) { return 'https://play.google.com/store/apps/details?id=foundation.metastate.eid_wallet'; } diff --git a/platforms/blabsy/src/components/sidebar/more-settings.tsx b/platforms/blabsy/src/components/sidebar/more-settings.tsx index 60404e1d..10b7de98 100644 --- a/platforms/blabsy/src/components/sidebar/more-settings.tsx +++ b/platforms/blabsy/src/components/sidebar/more-settings.tsx @@ -40,7 +40,7 @@ export function MoreSettings(): JSX.Element { group-focus-visible:ring-[#878a8c] dark:group-hover:bg-dark-primary/10 dark:group-focus-visible:ring-white xl:pr-5`, open && - 'bg-light-primary/10 dark:bg-dark-primary/10' + 'bg-light-primary/10 dark:bg-dark-primary/10' )} > diff --git a/platforms/blabsy/src/lib/context/chat-context.tsx b/platforms/blabsy/src/lib/context/chat-context.tsx index e2fae190..233af51e 100644 --- a/platforms/blabsy/src/lib/context/chat-context.tsx +++ b/platforms/blabsy/src/lib/context/chat-context.tsx @@ -102,20 +102,25 @@ export function ChatContextProvider({ chatsQuery, (snapshot) => { const chatsData = snapshot.docs.map((doc) => doc.data()); - + // Sort chats by last message timestamp (most recent first) const sortedChats = chatsData.sort((a, b) => { // If both have lastMessage, sort by timestamp if (a.lastMessage?.timestamp && b.lastMessage?.timestamp) { - return b.lastMessage.timestamp.toMillis() - a.lastMessage.timestamp.toMillis(); + return ( + b.lastMessage.timestamp.toMillis() - + a.lastMessage.timestamp.toMillis() + ); } // If only one has lastMessage, prioritize it - if (a.lastMessage?.timestamp && !b.lastMessage?.timestamp) return -1; - if (!a.lastMessage?.timestamp && b.lastMessage?.timestamp) return 1; + if (a.lastMessage?.timestamp && !b.lastMessage?.timestamp) + return -1; + if (!a.lastMessage?.timestamp && b.lastMessage?.timestamp) + return 1; // If neither has lastMessage, sort by updatedAt return b.updatedAt.toMillis() - a.updatedAt.toMillis(); }); - + setChats(sortedChats); setLoading(false); }, diff --git a/platforms/dreamSync/package.json b/platforms/dreamSync/package.json index 1dbdb7c0..ad3116b9 100644 --- a/platforms/dreamSync/package.json +++ b/platforms/dreamSync/package.json @@ -1,5 +1,5 @@ { - "name": "rest-express", + "name": "dreamsync", "version": "1.0.0", "type": "module", "license": "MIT", diff --git a/platforms/eReputation/package.json b/platforms/eReputation/package.json index d61682b7..0cb17527 100644 --- a/platforms/eReputation/package.json +++ b/platforms/eReputation/package.json @@ -1,5 +1,5 @@ { - "name": "rest-express", + "name": "eReputation", "version": "1.0.0", "type": "module", "license": "MIT", diff --git a/platforms/pictique/src/lib/fragments/MaintenanceBanner/MaintenanceBanner.svelte b/platforms/pictique/src/lib/fragments/MaintenanceBanner/MaintenanceBanner.svelte index 2902eb12..43532920 100644 --- 
a/platforms/pictique/src/lib/fragments/MaintenanceBanner/MaintenanceBanner.svelte +++ b/platforms/pictique/src/lib/fragments/MaintenanceBanner/MaintenanceBanner.svelte @@ -41,7 +41,7 @@ ⚠️ {motd.message}