diff --git a/README.md b/README.md index 48010b7a..df2b2568 100644 --- a/README.md +++ b/README.md @@ -1,145 +1,93 @@ - - -# MetaState Prototype - -## Progress Tracker - -| Project | Status | -| ------------------------------------------ | ----------- | -| [W3ID](./infrastructure/w3id/) | In Progress | -| [eID Wallet](./infrastructure/eid-wallet/) | In Progress | -| EVault Core | Planned | -| Web3 Adapter | Planned | - -## Documentation Links - -| Documentation | Description | Link | -| ---------------------------- | ------------------------------------------- | -------------------------------------------------------------------------- | -| MetaState Prototype | Main project README | [README.md](./README.md) | -| W3ID | Web 3 Identity System documentation | [W3ID README](./infrastructure/w3id/README.md) | -| eVault Core | Core eVault system documentation | [eVault Core README](./infrastructure/evault-core/README.md) | -| eVault Core W3ID Integration | W3ID integration details for eVault Core | [W3ID Integration](./infrastructure/evault-core/docs/w3id-integration.md) | -| eVault Provisioner | Provisioning eVault instances documentation | [eVault Provisioner README](./infrastructure/evault-provisioner/README.md) | -| Bug Report Template | GitHub issue template for bug reports | [Bug Report Template](./.github/ISSUE_TEMPLATE/bug-report.md) | - ## Project Structure ``` prototype/ -├─ .vscode/ -│ └─ settings.json ├─ infrastructure/ │ ├─ evault-core/ -│ │ └─ package.json -│ └─ w3id/ -│ └─ package.json -├─ packages/ -│ ├─ eslint-config/ -│ │ ├─ base.js -│ │ ├─ next.js -│ │ ├─ package.json -│ │ ├─ react-internal.js -│ │ └─ README.md -│ └─ typescript-config/ -│ ├─ base.json -│ ├─ nextjs.json -│ ├─ package.json -│ └─ react-library.json +│ ├─ w3id/ +│ └─ web3-adapter/ ├─ platforms/ -│ └─ .gitkeep -├─ services/ -│ ├─ ontology/ (MetaState Ontology Service) -│ │ └─ package.json -│ └─ web3-adapter/ (MetaState Web-3 Adapter Service) -│ └─ package.json -├─ .gitignore (Ignores files while upstream to repo) -├─ .npmrc (Dependency Manager Conf) -├─ package.json (Dependency Management) -├─ pnpm-lock.yaml (Reproducability) -├─ pnpm-workspace.yaml (Configures MonoRepo) -├─ README.md (This File) -└─ turbo.json (Configures TurboRepo) +│ ├─ registry/ +│ ├─ pictique-api/ +│ ├─ pictique/ +│ ├─ blabsy-w3ds-auth-api/ +│ ├─ blabsy/ +│ ├─ group-charter-manager-api/ +│ ├─ group-charter-manager/ +│ ├─ evoting-api/ +│ ├─ eVoting/ +│ ├─ dreamsync-api/ +│ ├─ cerberus/ +│ ├─ ereputation/ +│ └─ marketplace/ +├─ docker/ +│ └─ Dockerfile.* (Dedicated Dockerfiles for each service) +└─ dev-docker-compose.yaml (Docker Compose configuration) ``` diff --git a/dev-docker-compose.README.md b/dev-docker-compose.README.md deleted file mode 100644 index 4547e353..00000000 --- a/dev-docker-compose.README.md +++ /dev/null @@ -1,89 +0,0 @@ -# Dev Docker Compose - -This docker-compose file sets up the development environment for the Metastate project. 
- -## Core Services (Always Running) - -- **registry** - Runs on port 4321 -- **evault-core** - Runs on ports 3001 (Express/Provisioning) and 4000 (Fastify/GraphQL) -- **neo4j** - Runs on ports 7474 (HTTP) and 7687 (Bolt) for graph data storage -- **postgres** - Runs on port 5432 with multiple databases pre-created - -## Optional Platform Services - -Use Docker Compose profiles to enable optional platforms: - -### Available Profiles - -- `pictique` - Pictique API (port 1111) -- `evoting` - eVoting API (port 4000) -- `dreamsync` - DreamSync API (port 4001) -- `cerberus` - Cerberus (port 3002) -- `group-charter` - Group Charter Manager API (port 3003) -- `blabsy` - Blabsy W3DS Auth API (port 3000) -- `ereputation` - eReputation (port 5000) -- `marketplace` - Marketplace (port 5001) -- `all` - Enable all optional platforms at once - -## Usage - -### Start core services only: -```bash -docker compose -f dev-docker-compose.yaml up -``` - -### Start with specific platforms: -```bash -# Single platform -docker compose -f dev-docker-compose.yaml --profile pictique up - -# Multiple platforms -docker compose -f dev-docker-compose.yaml --profile pictique --profile evoting up - -# All platforms -docker compose -f dev-docker-compose.yaml --profile all up -``` - -### Background mode: -```bash -docker compose -f dev-docker-compose.yaml --profile pictique up -d -``` - -### Stop services: -```bash -docker compose -f dev-docker-compose.yaml down -``` - -### View logs: -```bash -# All services -docker compose -f dev-docker-compose.yaml logs -f - -# Specific service -docker compose -f dev-docker-compose.yaml logs -f registry -``` - -## Environment Variables - -Create a `.env` file in the project root with your configuration: - -```env -# Registry -REGISTRY_SHARED_SECRET=your-secret-here -PUBLIC_REGISTRY_URL=http://localhost:4321 - -# Database URLs (optional - defaults are provided) -REGISTRY_DATABASE_URL=postgresql://postgres:postgres@postgres:5432/registry -NEO4J_URI=bolt://neo4j:7687 -NEO4J_USER=neo4j -NEO4J_PASSWORD=neo4j -``` - -## Notes - -- All services mount the source code for hot-reload development -- Node modules are stored in Docker volumes to avoid host conflicts -- PostgreSQL automatically creates all required databases on first startup -- Services wait for database health checks before starting - - diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 737f4192..f17b6dbc 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -1,11 +1,19 @@ version: '3.8' +x-common-host-access: &common-host-access + extra_hosts: + - "host.docker.internal:host-gateway" + services: - # Core Services - Always Running + # Core Services - Always Running (included in all profiles) registry: + profiles: + - socials + - charter-blabsy + - all build: context: . 
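+      # Each service now builds from a dedicated Dockerfile under ./docker/ instead of the shared Dockerfile.dev.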
- dockerfile: ./Dockerfile.dev + dockerfile: ./docker/Dockerfile.registry ports: - "4321:4321" environment: @@ -17,12 +25,13 @@ services: - .:/app - node_modules_cache:/app/node_modules working_dir: /app/platforms/registry - command: sh -c "cd /app/platforms/registry && pnpm run dev" + command: sh -c "cd /app/platforms/registry && pnpm run migration:run 2>/dev/null || true && pnpm run dev" depends_on: postgres: condition: service_healthy networks: - metastate-network + <<: *common-host-access develop: watch: - action: restart @@ -33,6 +42,8 @@ services: path: ./infrastructure/w3id/src ignore: - node_modules + - action: restart + path: ./platforms/registry/src/migrations - action: rebuild path: ./platforms/registry/package.json - action: rebuild @@ -41,12 +52,16 @@ services: path: ./.env evault-core: + profiles: + - socials + - charter-blabsy + - all build: context: . - dockerfile: ./Dockerfile.dev + dockerfile: ./docker/Dockerfile.evault-core ports: - - "3001:3001" # Express (provisioning API) - - "4000:4000" # Fastify (GraphQL/HTTP) + - "3001:3001" # Express (provisioning API) + - "4000:4000" # Fastify (GraphQL/HTTP) environment: - NODE_ENV=development - EXPRESS_PORT=3001 @@ -61,8 +76,9 @@ services: volumes: - .:/app - node_modules_cache:/app/node_modules + - evault_core_node_modules:/app/infrastructure/evault-core/node_modules working_dir: /app/infrastructure/evault-core - command: sh -c "cd /app/infrastructure/evault-core && pnpm run dev" + command: sh -c "cd /app/infrastructure/evault-core && pnpm install && pnpm run migration:run 2>/dev/null || true && pnpm run dev" depends_on: postgres: condition: service_healthy @@ -72,16 +88,20 @@ services: condition: service_started networks: - metastate-network + <<: *common-host-access develop: watch: - action: restart path: ./infrastructure/evault-core/src ignore: - node_modules + - migrations - action: restart path: ./infrastructure/w3id/src ignore: - node_modules + - action: restart + path: ./infrastructure/evault-core/src/migrations - action: rebuild path: ./infrastructure/evault-core/package.json - action: rebuild @@ -91,24 +111,33 @@ services: # Neo4j for evault-core graph data neo4j: + profiles: + - socials + - charter-blabsy + - all image: neo4j:5.15 ports: - - "7474:7474" # HTTP - - "7687:7687" # Bolt + - "7474:7474" # HTTP + - "7687:7687" # Bolt environment: - NEO4J_AUTH=${NEO4J_USER:-neo4j}/${NEO4J_PASSWORD:-neo4j} volumes: - neo4j_data:/var/lib/neo4j/data networks: - metastate-network + <<: *common-host-access healthcheck: - test: ["CMD-SHELL", "cypher-shell -u neo4j -p ${NEO4J_PASSWORD:-neo4j} 'RETURN 1' || exit 1"] + test: [ "CMD-SHELL", "cypher-shell -u neo4j -p ${NEO4J_PASSWORD:-neo4j} 'RETURN 1' || exit 1" ] interval: 10s timeout: 5s retries: 5 # Database for services postgres: + profiles: + - socials + - charter-blabsy + - all image: postgres:15-alpine ports: - "5432:5432" @@ -121,22 +150,26 @@ services: - ./db/init-multiple-databases.sh:/docker-entrypoint-initdb.d/init-multiple-databases.sh networks: - metastate-network + <<: *common-host-access healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: [ "CMD-SHELL", "pg_isready -U postgres" ] interval: 10s timeout: 5s retries: 5 + logging: + driver: "none" # Optional Platform Services - Use profiles to enable - + # Pictique API pictique-api: profiles: - pictique + - socials - all build: context: . 
- dockerfile: ./Dockerfile.dev + dockerfile: ./docker/Dockerfile.pictique-api ports: - "1111:1111" environment: @@ -145,11 +178,13 @@ services: - DATABASE_URL=${PICTIQUE_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/pictique} - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} - PUBLIC_PICTIQUE_BASE_URL=${PUBLIC_PICTIQUE_BASE_URL:-http://localhost:1111} + - PICTIQUE_MAPPING_DB_PATH=${PICTIQUE_MAPPING_DB_PATH:-/app/data/mapping-dbs/pictique} volumes: - .:/app - node_modules_cache:/app/node_modules + - mapping_db_data:/app/data/mapping-dbs working_dir: /app/platforms/pictique-api - command: pnpm run dev + command: sh -c "pnpm run migration:run 2>/dev/null || true && pnpm run dev" depends_on: postgres: condition: service_healthy @@ -157,6 +192,11 @@ services: condition: service_started networks: - metastate-network + <<: *common-host-access + develop: + watch: + - action: restart + path: ./platforms/pictique-api/src/database/migrations # eVoting API evoting-api: @@ -165,20 +205,22 @@ services: - all build: context: . - dockerfile: ./Dockerfile.dev + dockerfile: ./docker/Dockerfile.evoting-api ports: - - "4000:4000" + - "4002:4000" environment: - NODE_ENV=development - PORT=4000 - DATABASE_URL=${EVOTING_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/evoting} - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} - - PUBLIC_EVOTING_BASE_URL=${PUBLIC_EVOTING_BASE_URL:-http://localhost:4000} + - PUBLIC_EVOTING_BASE_URL=${PUBLIC_EVOTING_BASE_URL:-http://localhost:4002} + - EVOTING_MAPPING_DB_PATH=${EVOTING_MAPPING_DB_PATH:-/app/data/mapping-dbs/evoting} volumes: - .:/app - node_modules_cache:/app/node_modules + - mapping_db_data:/app/data/mapping-dbs working_dir: /app/platforms/evoting-api - command: pnpm run dev + command: sh -c "pnpm run migration:run 2>/dev/null || true && pnpm run dev" depends_on: postgres: condition: service_healthy @@ -186,6 +228,11 @@ services: condition: service_started networks: - metastate-network + <<: *common-host-access + develop: + watch: + - action: restart + path: ./platforms/evoting-api/src/database/migrations # DreamSync API dreamsync-api: @@ -194,7 +241,7 @@ services: - all build: context: . - dockerfile: ./Dockerfile.dev + dockerfile: ./docker/Dockerfile.dreamsync-api ports: - "4001:4001" environment: @@ -204,11 +251,13 @@ services: - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} - DREAMSYNC_CLIENT_URL=${DREAMSYNC_CLIENT_URL:-http://localhost:4001} - JWT_SECRET=${JWT_SECRET:-dev-jwt-secret} + - DREAMSYNC_MAPPING_DB_PATH=${DREAMSYNC_MAPPING_DB_PATH:-/app/data/mapping-dbs/dreamsync} volumes: - .:/app - node_modules_cache:/app/node_modules + - mapping_db_data:/app/data/mapping-dbs working_dir: /app/platforms/dreamsync-api - command: pnpm run dev + command: sh -c "pnpm run migration:run 2>/dev/null || true && pnpm run dev" depends_on: postgres: condition: service_healthy @@ -216,15 +265,21 @@ services: condition: service_started networks: - metastate-network + <<: *common-host-access + develop: + watch: + - action: restart + path: ./platforms/dreamsync-api/src/database/migrations # Cerberus cerberus: profiles: - cerberus + - charter-blabsy - all build: context: . 
- dockerfile: ./Dockerfile.dev + dockerfile: ./docker/Dockerfile.cerberus ports: - "3002:3002" environment: @@ -233,11 +288,13 @@ services: - DATABASE_URL=${CERBERUS_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/cerberus} - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} - PUBLIC_CERBERUS_BASE_URL=${PUBLIC_CERBERUS_BASE_URL:-http://localhost:3002} + - CERBERUS_MAPPING_DB_PATH=${CERBERUS_MAPPING_DB_PATH:-/app/data/mapping-dbs/cerberus} volumes: - .:/app - node_modules_cache:/app/node_modules + - mapping_db_data:/app/data/mapping-dbs working_dir: /app/platforms/cerberus - command: pnpm run dev + command: sh -c "pnpm run migration:run 2>/dev/null || true && pnpm run dev" depends_on: postgres: condition: service_healthy @@ -245,15 +302,23 @@ services: condition: service_started networks: - metastate-network + <<: *common-host-access + develop: + watch: + - action: restart + path: ./platforms/cerberus/src/database/migrations + - action: restart + path: ./platforms/cerberus/src/migrations # Group Charter Manager API group-charter-manager-api: profiles: - group-charter + - charter-blabsy - all build: context: . - dockerfile: ./Dockerfile.dev + dockerfile: ./docker/Dockerfile.group-charter-manager-api ports: - "3003:3003" environment: @@ -262,11 +327,13 @@ services: - DATABASE_URL=${GROUP_CHARTER_DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/group_charter_manager} - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} - PUBLIC_GROUP_CHARTER_BASE_URL=${PUBLIC_GROUP_CHARTER_BASE_URL:-http://localhost:3003} + - GROUP_CHARTER_MAPPING_DB_PATH=${GROUP_CHARTER_MAPPING_DB_PATH:-/app/data/mapping-dbs/group-charter} volumes: - .:/app - node_modules_cache:/app/node_modules + - mapping_db_data:/app/data/mapping-dbs working_dir: /app/platforms/group-charter-manager-api - command: pnpm run dev + command: sh -c "pnpm run migration:run 2>/dev/null || true && pnpm run dev" depends_on: postgres: condition: service_healthy @@ -274,15 +341,22 @@ services: condition: service_started networks: - metastate-network + <<: *common-host-access + develop: + watch: + - action: restart + path: ./platforms/group-charter-manager-api/src/database/migrations # Blabsy W3DS Auth API blabsy-w3ds-auth-api: profiles: - blabsy + - socials + - charter-blabsy - all build: context: . - dockerfile: ./Dockerfile.dev + dockerfile: ./docker/Dockerfile.blabsy-w3ds-auth-api ports: - "3000:3000" environment: @@ -290,16 +364,25 @@ services: - PORT=3000 - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://registry:4321} - PUBLIC_BLABSY_BASE_URL=${PUBLIC_BLABSY_BASE_URL:-http://localhost:3000} + - BLABSY_MAPPING_DB_PATH=${BLABSY_MAPPING_DB_PATH:-/app/data/mapping-dbs/blabsy} + - GOOGLE_APPLICATION_CREDENTIALS=${GOOGLE_APPLICATION_CREDENTIALS:-} + - FIREBASE_CREDENTIALS_PATH=${FIREBASE_CREDENTIALS_PATH:-} volumes: - .:/app - node_modules_cache:/app/node_modules + - mapping_db_data:/app/data/mapping-dbs working_dir: /app/platforms/blabsy-w3ds-auth-api - command: pnpm run dev + command: sh -c "pnpm run migration:run 2>/dev/null || true && pnpm run dev" depends_on: registry: condition: service_started networks: - metastate-network + <<: *common-host-access + develop: + watch: + - action: restart + path: ./platforms/blabsy-w3ds-auth-api/src/database/migrations # eReputation ereputation: @@ -308,7 +391,7 @@ services: - all build: context: . 
- dockerfile: ./Dockerfile.dev + dockerfile: ./docker/Dockerfile.ereputation ports: - "5000:5000" environment: @@ -328,6 +411,7 @@ services: condition: service_started networks: - metastate-network + <<: *common-host-access # Marketplace marketplace: @@ -336,7 +420,7 @@ services: - all build: context: . - dockerfile: ./Dockerfile.dev + dockerfile: ./docker/Dockerfile.marketplace ports: - "5001:5001" environment: @@ -353,13 +437,134 @@ services: condition: service_started networks: - metastate-network + <<: *common-host-access + + # Frontend Services + + # Blabsy Frontend + blabsy: + profiles: + - blabsy + - socials + - charter-blabsy + - all + build: + context: . + dockerfile: ./docker/Dockerfile.blabsy + ports: + - "8080:8080" + environment: + - NODE_ENV=development + - NEXT_PUBLIC_BASE_URL=${PUBLIC_BLABSY_BASE_URL:-http://localhost:3000} + - NEXT_PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://localhost:4321} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/blabsy + command: pnpm run dev + depends_on: + blabsy-w3ds-auth-api: + condition: service_started + registry: + condition: service_started + networks: + - metastate-network + <<: *common-host-access + + # Pictique Frontend + pictique: + profiles: + - pictique + - socials + - all + build: + context: . + dockerfile: ./docker/Dockerfile.pictique + ports: + - "5173:5173" + environment: + - NODE_ENV=development + - PUBLIC_PICTIQUE_BASE_URL=${PUBLIC_PICTIQUE_BASE_URL:-http://localhost:1111} + - PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://localhost:4321} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/pictique + command: pnpm run dev + depends_on: + pictique-api: + condition: service_started + registry: + condition: service_started + networks: + - metastate-network + <<: *common-host-access + + # Group Charter Manager Frontend + group-charter-manager: + profiles: + - group-charter + - charter-blabsy + - all + build: + context: . + dockerfile: ./docker/Dockerfile.group-charter-manager + ports: + - "3004:3004" + environment: + - NODE_ENV=development + - NEXT_PUBLIC_GROUP_CHARTER_BASE_URL=${PUBLIC_GROUP_CHARTER_BASE_URL:-http://localhost:3003} + - NEXT_PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://localhost:4321} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/group-charter-manager + command: sh -c "next dev -p 3004" + depends_on: + group-charter-manager-api: + condition: service_started + registry: + condition: service_started + networks: + - metastate-network + <<: *common-host-access + + # eVoting Frontend + evoting: + profiles: + - evoting + - all + build: + context: . 
+ dockerfile: ./docker/Dockerfile.eVoting + ports: + - "3005:3005" + environment: + - NODE_ENV=development + - NEXT_PUBLIC_EVOTING_BASE_URL=${PUBLIC_EVOTING_BASE_URL:-http://localhost:4002} + - NEXT_PUBLIC_REGISTRY_URL=${PUBLIC_REGISTRY_URL:-http://localhost:4321} + volumes: + - .:/app + - node_modules_cache:/app/node_modules + working_dir: /app/platforms/eVoting + command: sh -c "next dev --turbopack -p 3005" + depends_on: + evoting-api: + condition: service_started + registry: + condition: service_started + networks: + - metastate-network + <<: *common-host-access volumes: postgres_data: neo4j_data: node_modules_cache: + mapping_db_data: + evault_core_node_modules: + networks: metastate-network: driver: bridge - diff --git a/docker/Dockerfile.blabsy b/docker/Dockerfile.blabsy new file mode 100644 index 00000000..65f9feb9 --- /dev/null +++ b/docker/Dockerfile.blabsy @@ -0,0 +1,27 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +RUN turbo prune blabsy --docker + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy pruned workspace +COPY --from=prepare /app/out/json/ . +# Install dependencies (build will happen at runtime with volumes) +RUN pnpm install --frozen-lockfile +COPY --from=prepare /app/out/full/ . + +WORKDIR /app/platforms/blabsy +EXPOSE 8080 +CMD ["pnpm", "dev"] + diff --git a/docker/Dockerfile.blabsy-w3ds-auth-api b/docker/Dockerfile.blabsy-w3ds-auth-api new file mode 100644 index 00000000..0ef1a77e --- /dev/null +++ b/docker/Dockerfile.blabsy-w3ds-auth-api @@ -0,0 +1,57 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +# Generate a partial monorepo with a pruned lockfile for blabsy-w3ds-auth-api +RUN turbo prune blabsy-w3ds-auth-api --docker + +# --- +FROM base AS builder +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config first so pnpm recognizes workspace packages +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy infrastructure folder (before install) so postinstall scripts can find tsconfig files +# web3-adapter depends on evault-core, which depends on w3id +COPY --from=prepare /app/infrastructure/w3id ./infrastructure/w3id +COPY --from=prepare /app/infrastructure/evault-core ./infrastructure/evault-core +COPY --from=prepare /app/infrastructure/web3-adapter ./infrastructure/web3-adapter +# First install the dependencies (as they change less often) +# Use --no-frozen-lockfile because w3id dependencies aren't in the pruned lockfile +COPY --from=prepare /app/out/json/ . +RUN pnpm install --no-frozen-lockfile +# Build the project +COPY --from=prepare /app/out/full/ . 
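+# Manifests (out/json) were copied and installed before the full sources above, so the
+# dependency install stays in its own cached layer and source-only changes skip the reinstall.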
+# Install dependencies for workspace packages (they need to be recognized as workspace packages) +# Use --no-frozen-lockfile because these weren't in the pruned lockfile +RUN pnpm install --no-frozen-lockfile +# Build workspace dependencies in order, then the main package +RUN pnpm turbo build --filter=w3id && pnpm turbo build --filter=evault-core && pnpm turbo build --filter=web3-adapter && pnpm turbo build --filter=blabsy-w3ds-auth-api + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Create parent directory structure for SQLite databases (must exist before volume mount) +RUN mkdir -p /app/data/mapping-dbs/blabsy +# Copy entrypoint script +COPY --from=prepare /app/docker/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh +# Copy built application +COPY --from=builder /app/platforms/blabsy-w3ds-auth-api/dist ./platforms/blabsy-w3ds-auth-api/dist +COPY --from=builder /app/platforms/blabsy-w3ds-auth-api/package.json ./platforms/blabsy-w3ds-auth-api/ +COPY --from=builder /app/platforms/blabsy-w3ds-auth-api/node_modules ./platforms/blabsy-w3ds-auth-api/node_modules +COPY --from=builder /app/infrastructure ./infrastructure +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./ +COPY --from=builder /app/pnpm-workspace.yaml ./ + +WORKDIR /app/platforms/blabsy-w3ds-auth-api +EXPOSE 3000 +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] +CMD ["pnpm", "start"] + diff --git a/docker/Dockerfile.cerberus b/docker/Dockerfile.cerberus new file mode 100644 index 00000000..10fff8a3 --- /dev/null +++ b/docker/Dockerfile.cerberus @@ -0,0 +1,57 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +# Generate a partial monorepo with a pruned lockfile for cerberus +RUN turbo prune cerberus --docker + +# --- +FROM base AS builder +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config first so pnpm recognizes workspace packages +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy infrastructure folder (before install) so postinstall scripts can find tsconfig files +# web3-adapter depends on evault-core, which depends on w3id +COPY --from=prepare /app/infrastructure/w3id ./infrastructure/w3id +COPY --from=prepare /app/infrastructure/evault-core ./infrastructure/evault-core +COPY --from=prepare /app/infrastructure/web3-adapter ./infrastructure/web3-adapter +# First install the dependencies (as they change less often) +# Use --no-frozen-lockfile because w3id dependencies aren't in the pruned lockfile +COPY --from=prepare /app/out/json/ . +RUN pnpm install --no-frozen-lockfile +# Build the project +COPY --from=prepare /app/out/full/ . 
+# Install dependencies for workspace packages (they need to be recognized as workspace packages) +# Use --no-frozen-lockfile because these weren't in the pruned lockfile +RUN pnpm install --no-frozen-lockfile +# Build workspace dependencies in order, then the main package +RUN pnpm turbo build --filter=w3id && pnpm turbo build --filter=evault-core && pnpm turbo build --filter=web3-adapter && pnpm turbo build --filter=cerberus + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Create parent directory structure for SQLite databases (must exist before volume mount) +RUN mkdir -p /app/data/mapping-dbs/cerberus +# Copy entrypoint script +COPY --from=prepare /app/docker/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh +# Copy built application +COPY --from=builder /app/platforms/cerberus/dist ./platforms/cerberus/dist +COPY --from=builder /app/platforms/cerberus/package.json ./platforms/cerberus/ +COPY --from=builder /app/platforms/cerberus/node_modules ./platforms/cerberus/node_modules +COPY --from=builder /app/infrastructure ./infrastructure +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./ +COPY --from=builder /app/pnpm-workspace.yaml ./ + +WORKDIR /app/platforms/cerberus +EXPOSE 3002 +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] +CMD ["pnpm", "start"] + diff --git a/docker/Dockerfile.dreamsync-api b/docker/Dockerfile.dreamsync-api new file mode 100644 index 00000000..3d3a8b12 --- /dev/null +++ b/docker/Dockerfile.dreamsync-api @@ -0,0 +1,57 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +# Generate a partial monorepo with a pruned lockfile for dreamsync-api +RUN turbo prune dreamsync-api --docker + +# --- +FROM base AS builder +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config first so pnpm recognizes workspace packages +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy infrastructure folder (before install) so postinstall scripts can find tsconfig files +# web3-adapter depends on evault-core, which depends on w3id +COPY --from=prepare /app/infrastructure/w3id ./infrastructure/w3id +COPY --from=prepare /app/infrastructure/evault-core ./infrastructure/evault-core +COPY --from=prepare /app/infrastructure/web3-adapter ./infrastructure/web3-adapter +# First install the dependencies (as they change less often) +# Use --no-frozen-lockfile because w3id dependencies aren't in the pruned lockfile +COPY --from=prepare /app/out/json/ . +RUN pnpm install --no-frozen-lockfile +# Build the project +COPY --from=prepare /app/out/full/ . 
+# Install dependencies for workspace packages (they need to be recognized as workspace packages) +# Use --no-frozen-lockfile because these weren't in the pruned lockfile +RUN pnpm install --no-frozen-lockfile +# Build workspace dependencies in order, then the main package +RUN pnpm turbo build --filter=w3id && pnpm turbo build --filter=evault-core && pnpm turbo build --filter=web3-adapter && pnpm turbo build --filter=dreamsync-api + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Create parent directory structure for SQLite databases (must exist before volume mount) +RUN mkdir -p /app/data/mapping-dbs/dreamsync +# Copy entrypoint script +COPY --from=prepare /app/docker/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh +# Copy built application +COPY --from=builder /app/platforms/dreamsync-api/dist ./platforms/dreamsync-api/dist +COPY --from=builder /app/platforms/dreamsync-api/package.json ./platforms/dreamsync-api/ +COPY --from=builder /app/platforms/dreamsync-api/node_modules ./platforms/dreamsync-api/node_modules +COPY --from=builder /app/infrastructure ./infrastructure +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./ +COPY --from=builder /app/pnpm-workspace.yaml ./ + +WORKDIR /app/platforms/dreamsync-api +EXPOSE 4001 +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] +CMD ["pnpm", "start"] + diff --git a/docker/Dockerfile.eVoting b/docker/Dockerfile.eVoting new file mode 100644 index 00000000..e9bcde1f --- /dev/null +++ b/docker/Dockerfile.eVoting @@ -0,0 +1,27 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +RUN turbo prune evoting --docker + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy pruned workspace +COPY --from=prepare /app/out/json/ . +# Install dependencies (build will happen at runtime with volumes) +RUN pnpm install --frozen-lockfile +COPY --from=prepare /app/out/full/ . + +WORKDIR /app/platforms/eVoting +EXPOSE 3005 +CMD ["pnpm", "dev"] + diff --git a/docker/Dockerfile.ereputation b/docker/Dockerfile.ereputation new file mode 100644 index 00000000..06c945a6 --- /dev/null +++ b/docker/Dockerfile.ereputation @@ -0,0 +1,37 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +# Generate a partial monorepo with a pruned lockfile for eReputation +RUN turbo prune eReputation --docker + +# --- +FROM base AS builder +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# First install the dependencies (as they change less often) +COPY --from=prepare /app/out/json/ . +RUN pnpm install --frozen-lockfile +# Build the project +COPY --from=prepare /app/out/full/ . 
+RUN pnpm turbo build --filter=eReputation + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy built application +COPY --from=builder /app/platforms/eReputation/dist ./platforms/eReputation/dist +COPY --from=builder /app/platforms/eReputation/package.json ./platforms/eReputation/ +COPY --from=builder /app/platforms/eReputation/node_modules ./platforms/eReputation/node_modules +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./ +COPY --from=builder /app/pnpm-workspace.yaml ./ + +WORKDIR /app/platforms/eReputation +EXPOSE 5000 +CMD ["pnpm", "start"] + diff --git a/docker/Dockerfile.evault-core b/docker/Dockerfile.evault-core new file mode 100644 index 00000000..df49d317 --- /dev/null +++ b/docker/Dockerfile.evault-core @@ -0,0 +1,50 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +# Generate a partial monorepo with a pruned lockfile for evault-core +RUN turbo prune evault-core --docker + +# --- +FROM base AS builder +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config first so pnpm recognizes workspace packages +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy infrastructure folder (before install) so postinstall scripts can find tsconfig files +# evault-core depends on w3id and web3-adapter +COPY --from=prepare /app/infrastructure/w3id ./infrastructure/w3id +COPY --from=prepare /app/infrastructure/web3-adapter ./infrastructure/web3-adapter +# First install the dependencies (as they change less often) +# Use --no-frozen-lockfile because w3id dependencies aren't in the pruned lockfile +COPY --from=prepare /app/out/json/ . +RUN pnpm install --no-frozen-lockfile +# Build the project +COPY --from=prepare /app/out/full/ . +# Install dependencies for workspace packages (they need to be recognized as workspace packages) +# Use --no-frozen-lockfile because these weren't in the pruned lockfile +RUN pnpm install --no-frozen-lockfile +# Build workspace dependencies in order, then the main package +RUN pnpm turbo build --filter=w3id && pnpm turbo build --filter=web3-adapter && pnpm turbo build --filter=evault-core + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy built application +COPY --from=builder /app/infrastructure/evault-core/dist ./infrastructure/evault-core/dist +COPY --from=builder /app/infrastructure/evault-core/package.json ./infrastructure/evault-core/ +COPY --from=builder /app/infrastructure/evault-core/node_modules ./infrastructure/evault-core/node_modules +COPY --from=builder /app/infrastructure ./infrastructure +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./ +COPY --from=builder /app/pnpm-workspace.yaml ./ + +WORKDIR /app/infrastructure/evault-core +EXPOSE 3001 4000 +CMD ["pnpm", "start"] + diff --git a/docker/Dockerfile.evoting-api b/docker/Dockerfile.evoting-api new file mode 100644 index 00000000..4636f24a --- /dev/null +++ b/docker/Dockerfile.evoting-api @@ -0,0 +1,59 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . 
+# Generate a partial monorepo with a pruned lockfile for evoting-api +RUN turbo prune evoting-api --docker + +# --- +FROM base AS builder +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config first so pnpm recognizes workspace packages +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy infrastructure folder (before install) so postinstall scripts can find tsconfig files +# web3-adapter depends on evault-core, which depends on w3id +# evoting-api also depends on blindvote +COPY --from=prepare /app/infrastructure/w3id ./infrastructure/w3id +COPY --from=prepare /app/infrastructure/evault-core ./infrastructure/evault-core +COPY --from=prepare /app/infrastructure/web3-adapter ./infrastructure/web3-adapter +COPY --from=prepare /app/infrastructure/blindvote ./infrastructure/blindvote +# First install the dependencies (as they change less often) +# Use --no-frozen-lockfile because w3id dependencies aren't in the pruned lockfile +COPY --from=prepare /app/out/json/ . +RUN pnpm install --no-frozen-lockfile +# Build the project +COPY --from=prepare /app/out/full/ . +# Install dependencies for workspace packages (they need to be recognized as workspace packages) +# Use --no-frozen-lockfile because these weren't in the pruned lockfile +RUN pnpm install --no-frozen-lockfile +# Build workspace dependencies in order, then the main package +RUN pnpm turbo build --filter=w3id && pnpm turbo build --filter=evault-core && pnpm turbo build --filter=web3-adapter && pnpm turbo build --filter=blindvote && pnpm turbo build --filter=evoting-api + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Create parent directory structure for SQLite databases (must exist before volume mount) +RUN mkdir -p /app/data/mapping-dbs/evoting +# Copy entrypoint script +COPY --from=prepare /app/docker/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh +# Copy built application +COPY --from=builder /app/platforms/evoting-api/dist ./platforms/evoting-api/dist +COPY --from=builder /app/platforms/evoting-api/package.json ./platforms/evoting-api/ +COPY --from=builder /app/platforms/evoting-api/node_modules ./platforms/evoting-api/node_modules +COPY --from=builder /app/infrastructure ./infrastructure +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./ +COPY --from=builder /app/pnpm-workspace.yaml ./ + +WORKDIR /app/platforms/evoting-api +EXPOSE 4000 +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] +CMD ["pnpm", "start"] + diff --git a/docker/Dockerfile.group-charter-manager b/docker/Dockerfile.group-charter-manager new file mode 100644 index 00000000..654ea5f0 --- /dev/null +++ b/docker/Dockerfile.group-charter-manager @@ -0,0 +1,27 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +RUN turbo prune group-charter-manager --docker + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy pruned workspace +COPY --from=prepare /app/out/json/ . +# Install dependencies (build will happen at runtime with volumes) +RUN pnpm install --frozen-lockfile +COPY --from=prepare /app/out/full/ . 
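+# No build step here: the dev compose mounts the repo into /app and runs `pnpm dev`,
+# so this image only needs the pruned workspace with dependencies installed.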
+ +WORKDIR /app/platforms/group-charter-manager +EXPOSE 3004 +CMD ["pnpm", "dev"] + diff --git a/docker/Dockerfile.group-charter-manager-api b/docker/Dockerfile.group-charter-manager-api new file mode 100644 index 00000000..4f930ffe --- /dev/null +++ b/docker/Dockerfile.group-charter-manager-api @@ -0,0 +1,57 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +# Generate a partial monorepo with a pruned lockfile for group-charter-manager-api +RUN turbo prune group-charter-manager-api --docker + +# --- +FROM base AS builder +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config first so pnpm recognizes workspace packages +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy infrastructure folder (before install) so postinstall scripts can find tsconfig files +# web3-adapter depends on evault-core, which depends on w3id +COPY --from=prepare /app/infrastructure/w3id ./infrastructure/w3id +COPY --from=prepare /app/infrastructure/evault-core ./infrastructure/evault-core +COPY --from=prepare /app/infrastructure/web3-adapter ./infrastructure/web3-adapter +# First install the dependencies (as they change less often) +# Use --no-frozen-lockfile because w3id dependencies aren't in the pruned lockfile +COPY --from=prepare /app/out/json/ . +RUN pnpm install --no-frozen-lockfile +# Build the project +COPY --from=prepare /app/out/full/ . +# Install dependencies for workspace packages (they need to be recognized as workspace packages) +# Use --no-frozen-lockfile because these weren't in the pruned lockfile +RUN pnpm install --no-frozen-lockfile +# Build workspace dependencies in order, then the main package +RUN pnpm turbo build --filter=w3id && pnpm turbo build --filter=evault-core && pnpm turbo build --filter=web3-adapter && pnpm turbo build --filter=group-charter-manager-api + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Create parent directory structure for SQLite databases (must exist before volume mount) +RUN mkdir -p /app/data/mapping-dbs/group-charter +# Copy entrypoint script +COPY --from=prepare /app/docker/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh +# Copy built application +COPY --from=builder /app/platforms/group-charter-manager-api/dist ./platforms/group-charter-manager-api/dist +COPY --from=builder /app/platforms/group-charter-manager-api/package.json ./platforms/group-charter-manager-api/ +COPY --from=builder /app/platforms/group-charter-manager-api/node_modules ./platforms/group-charter-manager-api/node_modules +COPY --from=builder /app/infrastructure ./infrastructure +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./ +COPY --from=builder /app/pnpm-workspace.yaml ./ + +WORKDIR /app/platforms/group-charter-manager-api +EXPOSE 3003 +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] +CMD ["pnpm", "start"] + diff --git a/docker/Dockerfile.marketplace b/docker/Dockerfile.marketplace new file mode 100644 index 00000000..edc9cec7 --- /dev/null +++ b/docker/Dockerfile.marketplace @@ -0,0 +1,37 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g 
turbo@^2 +COPY . . +# Generate a partial monorepo with a pruned lockfile for marketplace +RUN turbo prune marketplace --docker + +# --- +FROM base AS builder +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# First install the dependencies (as they change less often) +COPY --from=prepare /app/out/json/ . +RUN pnpm install --frozen-lockfile +# Build the project +COPY --from=prepare /app/out/full/ . +RUN pnpm turbo build --filter=marketplace + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy built application +COPY --from=builder /app/platforms/marketplace/dist ./platforms/marketplace/dist +COPY --from=builder /app/platforms/marketplace/package.json ./platforms/marketplace/ +COPY --from=builder /app/platforms/marketplace/node_modules ./platforms/marketplace/node_modules +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./ +COPY --from=builder /app/pnpm-workspace.yaml ./ + +WORKDIR /app/platforms/marketplace +EXPOSE 5001 +CMD ["pnpm", "start"] + diff --git a/docker/Dockerfile.pictique b/docker/Dockerfile.pictique new file mode 100644 index 00000000..8f7b7668 --- /dev/null +++ b/docker/Dockerfile.pictique @@ -0,0 +1,27 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +RUN turbo prune pictique --docker + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy pruned workspace +COPY --from=prepare /app/out/json/ . +# Install dependencies (build will happen at runtime with volumes) +RUN pnpm install --frozen-lockfile +COPY --from=prepare /app/out/full/ . + +WORKDIR /app/platforms/pictique +EXPOSE 5173 +CMD ["pnpm", "dev"] + diff --git a/docker/Dockerfile.pictique-api b/docker/Dockerfile.pictique-api new file mode 100644 index 00000000..41b6afbf --- /dev/null +++ b/docker/Dockerfile.pictique-api @@ -0,0 +1,57 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +# Generate a partial monorepo with a pruned lockfile for piqtique-api +RUN turbo prune piqtique-api --docker + +# --- +FROM base AS builder +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy workspace config first so pnpm recognizes workspace packages +COPY --from=prepare /app/pnpm-workspace.yaml ./ +COPY --from=prepare /app/package.json ./ +# Copy infrastructure folder (before install) so postinstall scripts can find tsconfig files +# web3-adapter depends on evault-core, which depends on w3id +COPY --from=prepare /app/infrastructure/w3id ./infrastructure/w3id +COPY --from=prepare /app/infrastructure/evault-core ./infrastructure/evault-core +COPY --from=prepare /app/infrastructure/web3-adapter ./infrastructure/web3-adapter +# First install the dependencies (as they change less often) +# Use --no-frozen-lockfile because w3id dependencies aren't in the pruned lockfile +COPY --from=prepare /app/out/json/ . +RUN pnpm install --no-frozen-lockfile +# Build the project +COPY --from=prepare /app/out/full/ . 
+# Install dependencies for workspace packages (they need to be recognized as workspace packages) +# Use --no-frozen-lockfile because these weren't in the pruned lockfile +RUN pnpm install --no-frozen-lockfile +# Build workspace dependencies in order, then the main package +RUN pnpm turbo build --filter=w3id && pnpm turbo build --filter=evault-core && pnpm turbo build --filter=web3-adapter && pnpm turbo build --filter=piqtique-api + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Create parent directory structure for SQLite databases (must exist before volume mount) +RUN mkdir -p /app/data/mapping-dbs/pictique +# Copy entrypoint script +COPY --from=prepare /app/docker/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh +# Copy built application +COPY --from=builder /app/platforms/pictique-api/dist ./platforms/pictique-api/dist +COPY --from=builder /app/platforms/pictique-api/package.json ./platforms/pictique-api/ +COPY --from=builder /app/platforms/pictique-api/node_modules ./platforms/pictique-api/node_modules +COPY --from=builder /app/infrastructure ./infrastructure +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./ +COPY --from=builder /app/pnpm-workspace.yaml ./ + +WORKDIR /app/platforms/pictique-api +EXPOSE 1111 +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] +CMD ["pnpm", "start"] + diff --git a/docker/Dockerfile.registry b/docker/Dockerfile.registry new file mode 100644 index 00000000..b2d715e2 --- /dev/null +++ b/docker/Dockerfile.registry @@ -0,0 +1,38 @@ +FROM node:18-alpine AS base +RUN apk update && apk add --no-cache libc6-compat +WORKDIR /app + +# --- +FROM base AS prepare +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN npm install -g turbo@^2 +COPY . . +# Generate a partial monorepo with a pruned lockfile for registry +RUN turbo prune registry --docker + +# --- +FROM base AS builder +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# First install the dependencies (as they change less often) +COPY --from=prepare /app/out/json/ . +RUN pnpm install --frozen-lockfile +# Build the project +COPY --from=prepare /app/out/full/ . 
+# Build workspace dependencies first, then the main package (if any) +RUN pnpm turbo build --filter=registry + +# --- +FROM base AS runner +RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +# Copy built application +COPY --from=builder /app/platforms/registry/dist ./platforms/registry/dist +COPY --from=builder /app/platforms/registry/package.json ./platforms/registry/ +COPY --from=builder /app/platforms/registry/node_modules ./platforms/registry/node_modules +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./ +COPY --from=builder /app/pnpm-workspace.yaml ./ + +WORKDIR /app/platforms/registry +EXPOSE 4321 +CMD ["pnpm", "start"] + diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh new file mode 100755 index 00000000..25a70eb0 --- /dev/null +++ b/docker/entrypoint.sh @@ -0,0 +1,23 @@ +#!/bin/sh +set -e + +# Create parent directory first - this is critical for SQLite +# The parent directory MUST exist before creating subdirectories +mkdir -p /app/data +mkdir -p /app/data/mapping-dbs + +# Create SQLite mapping database directories if they don't exist +# This ensures directories exist even when volumes are mounted +mkdir -p /app/data/mapping-dbs/pictique +mkdir -p /app/data/mapping-dbs/evoting +mkdir -p /app/data/mapping-dbs/dreamsync +mkdir -p /app/data/mapping-dbs/cerberus +mkdir -p /app/data/mapping-dbs/group-charter +mkdir -p /app/data/mapping-dbs/blabsy + +# Ensure proper permissions (read/write/execute for owner and group) +chmod -R 755 /app/data/mapping-dbs 2>/dev/null || true + +# Execute the command passed to the entrypoint +exec "$@" + diff --git a/infrastructure/control-panel/src/lib/services/registry.ts b/infrastructure/control-panel/src/lib/services/registry.ts index ecde4925..99b1c8b0 100644 --- a/infrastructure/control-panel/src/lib/services/registry.ts +++ b/infrastructure/control-panel/src/lib/services/registry.ts @@ -65,25 +65,25 @@ export class RegistryService { return [ { name: 'Blabsy', - url: 'http://192.168.0.225:4444', + url: 'http://192.168.0.235:4444', status: 'Active', uptime: '24h' }, { name: 'Pictique', - url: 'http://192.168.0.225:1111', + url: 'http://192.168.0.235:1111', status: 'Active', uptime: '24h' }, { name: 'Group Charter', - url: 'http://192.168.0.225:5555', + url: 'http://192.168.0.235:5555', status: 'Active', uptime: '24h' }, { name: 'Cerberus', - url: 'http://192.168.0.225:6666', + url: 'http://192.168.0.235:6666', status: 'Active', uptime: '24h' } diff --git a/infrastructure/eid-wallet/src/lib/global/controllers/evault.ts b/infrastructure/eid-wallet/src/lib/global/controllers/evault.ts index 75e17ea3..1802b143 100644 --- a/infrastructure/eid-wallet/src/lib/global/controllers/evault.ts +++ b/infrastructure/eid-wallet/src/lib/global/controllers/evault.ts @@ -169,9 +169,16 @@ export class VaultController { /** * Create a new GraphQL client every time */ - private async ensureClient(w3id: string): Promise { + private async ensureClient( + w3id: string, + ename: string, + ): Promise { this.#endpoint = await this.resolveEndpoint(w3id); - this.#client = new GraphQLClient(this.#endpoint); + this.#client = new GraphQLClient(this.#endpoint, { + headers: { + "X-ENAME": ename, + }, + }); return this.#client; } @@ -204,7 +211,7 @@ export class VaultController { for (let attempt = 1; attempt <= maxRetries; attempt++) { try { - const client = await this.ensureClient(w3id); + const client = await this.ensureClient(w3id, ename); console.log( `Attempting to create UserProfile in eVault (attempt 
${attempt}/${maxRetries})`, diff --git a/infrastructure/eid-wallet/src/routes/(app)/scan-qr/+page.svelte b/infrastructure/eid-wallet/src/routes/(app)/scan-qr/+page.svelte index d335b52e..05d8db55 100644 --- a/infrastructure/eid-wallet/src/routes/(app)/scan-qr/+page.svelte +++ b/infrastructure/eid-wallet/src/routes/(app)/scan-qr/+page.svelte @@ -1071,7 +1071,7 @@ onMount(async () => { // Extract platform URL from the data const platformUrl = signingData?.platformUrl || - "http://192.168.0.225:7777"; + "http://192.168.0.235:7777"; // Set up signingData for blind voting UI signingData = { diff --git a/infrastructure/evault-core/package.json b/infrastructure/evault-core/package.json index 1d7fafd8..94da22be 100644 --- a/infrastructure/evault-core/package.json +++ b/infrastructure/evault-core/package.json @@ -10,10 +10,10 @@ "test": "vitest --exclude '**/e2e/**'", "test:e2e": "vitest src/e2e/evault-core.e2e.spec.ts --run --config vitest.config.e2e.ts", "test:e2e:web3-adapter": "vitest src/e2e/evault-core.e2e.spec.ts --run --config vitest.config.e2e.ts", - "typeorm": "typeorm-ts-node-commonjs", - "migration:generate": "npm run typeorm migration:generate -- -d src/config/database.ts", - "migration:run": "npm run typeorm migration:run -- -d src/config/database.ts", - "migration:revert": "npm run typeorm migration:revert -- -d src/config/database.ts" + "typeorm": "typeorm", + "migration:generate": "npm run typeorm migration:generate -- -d dist/config/database.js", + "migration:run": "npm run typeorm migration:run -- -d dist/config/database.js", + "migration:revert": "npm run typeorm migration:revert -- -d dist/config/database.js" }, "dependencies": { "@fastify/formbody": "^8.0.2", @@ -54,4 +54,4 @@ "typescript": "^5.3.3", "vitest": "^1.6.1" } -} \ No newline at end of file +} diff --git a/infrastructure/evault-core/src/core/protocol/graphql-server.ts b/infrastructure/evault-core/src/core/protocol/graphql-server.ts index c932f49f..3419d52c 100644 --- a/infrastructure/evault-core/src/core/protocol/graphql-server.ts +++ b/infrastructure/evault-core/src/core/protocol/graphql-server.ts @@ -39,13 +39,13 @@ export class GraphQLServer { */ private async getActivePlatforms(): Promise { try { - if (!process.env.REGISTRY_URL) { + if (!process.env.PUBLIC_REGISTRY_URL) { console.error("REGISTRY_URL is not set"); return []; } const response = await axios.get( - new URL("/platforms", process.env.REGISTRY_URL).toString() + new URL("/platforms", process.env.PUBLIC_REGISTRY_URL).toString() ); return response.data; } catch (error) { diff --git a/infrastructure/evault-core/src/services/ProvisioningService.spec.ts b/infrastructure/evault-core/src/services/ProvisioningService.spec.ts index eb2c60e1..c9ef9111 100644 --- a/infrastructure/evault-core/src/services/ProvisioningService.spec.ts +++ b/infrastructure/evault-core/src/services/ProvisioningService.spec.ts @@ -83,6 +83,7 @@ describe("ProvisioningService", () => { registryUrl = "http://localhost:4322"; process.env.PUBLIC_REGISTRY_URL = registryUrl; process.env.REGISTRY_SHARED_SECRET = "test-secret"; + process.env.PUBLIC_EVAULT_SERVER_URI = "http://localhost:3000"; provisioningService = new ProvisioningService(verificationService); }); @@ -92,6 +93,7 @@ describe("ProvisioningService", () => { await teardownTestDatabase(); delete process.env.PUBLIC_REGISTRY_URL; delete process.env.REGISTRY_SHARED_SECRET; + delete process.env.PUBLIC_EVAULT_SERVER_URI; }); beforeEach(async () => { diff --git a/infrastructure/evault-core/src/services/ProvisioningService.ts 
b/infrastructure/evault-core/src/services/ProvisioningService.ts index bf78f035..5d342fbe 100644 --- a/infrastructure/evault-core/src/services/ProvisioningService.ts +++ b/infrastructure/evault-core/src/services/ProvisioningService.ts @@ -20,7 +20,7 @@ export interface ProvisionResponse { } export class ProvisioningService { - constructor(private verificationService: VerificationService) {} + constructor(private verificationService: VerificationService) { } /** * Provisions a new eVault logically (no infrastructure creation) @@ -69,10 +69,9 @@ export class ProvisioningService { // If JWT verification fails, re-throw with a clearer message // but preserve the original error for debugging throw new Error( - `JWT verification failed: ${ - jwtError instanceof Error - ? jwtError.message - : String(jwtError) + `JWT verification failed: ${jwtError instanceof Error + ? jwtError.message + : String(jwtError) }`, ); } @@ -97,10 +96,9 @@ export class ProvisioningService { // If W3ID generation fails, it's likely an entropy format issue // Re-throw with clearer message, but let verification errors take precedence throw new Error( - `Failed to generate W3ID from entropy: ${ - w3idError instanceof Error - ? w3idError.message - : String(w3idError) + `Failed to generate W3ID from entropy: ${w3idError instanceof Error + ? w3idError.message + : String(w3idError) }`, ); } @@ -149,23 +147,16 @@ export class ProvisioningService { evaultId = await new W3IDBuilder().withGlobal(true).build(); } catch (evaultIdError) { throw new Error( - `Failed to generate evault ID: ${ - evaultIdError instanceof Error - ? evaultIdError.message - : String(evaultIdError) + `Failed to generate evault ID: ${evaultIdError instanceof Error + ? evaultIdError.message + : String(evaultIdError) }`, ); } // Build URI (IP:PORT format pointing to shared service) - const fastifyPort = - process.env.FASTIFY_PORT || process.env.PORT || 4000; - const baseUri = - process.env.EVAULT_BASE_URI || - `http://${ - process.env.EVAULT_HOST || "localhost" - }:${fastifyPort}`; - const uri = baseUri; + const uri = process.env.PUBLIC_EVAULT_SERVER_URI; + console.log("URI set", uri) // Register in registry await axios.post( diff --git a/infrastructure/evault-core/src/test-utils/e2e-setup.ts b/infrastructure/evault-core/src/test-utils/e2e-setup.ts index 310f09e2..7fc612bd 100644 --- a/infrastructure/evault-core/src/test-utils/e2e-setup.ts +++ b/infrastructure/evault-core/src/test-utils/e2e-setup.ts @@ -66,7 +66,9 @@ export async function setupE2ETestServer( process.env.NEO4J_USER = neo4jContainer.getUsername(); process.env.NEO4J_PASSWORD = neo4jContainer.getPassword(); process.env.PUBLIC_REGISTRY_URL = registryUrl; + process.env.REGISTRY_URL = registryUrl; // Also set REGISTRY_URL for vault-access-guard process.env.REGISTRY_SHARED_SECRET = "test-secret"; + process.env.PUBLIC_EVAULT_SERVER_URI = `http://localhost:${fastifyPort}`; process.env.EVAULT_BASE_URI = `http://localhost:${fastifyPort}`; process.env.EVAULT_HOST = "localhost"; process.env.PORT = String(fastifyPort); @@ -222,7 +224,9 @@ export async function teardownE2ETestServer(server: E2ETestServer | undefined): delete process.env.NEO4J_USER; delete process.env.NEO4J_PASSWORD; delete process.env.PUBLIC_REGISTRY_URL; + delete process.env.REGISTRY_URL; delete process.env.REGISTRY_SHARED_SECRET; + delete process.env.PUBLIC_EVAULT_SERVER_URI; delete process.env.EVAULT_BASE_URI; delete process.env.EVAULT_HOST; delete process.env.PORT; diff --git a/infrastructure/web3-adapter/src/db/mapping.db.ts 
b/infrastructure/web3-adapter/src/db/mapping.db.ts index a2b2b1d4..3c8807a7 100644 --- a/infrastructure/web3-adapter/src/db/mapping.db.ts +++ b/infrastructure/web3-adapter/src/db/mapping.db.ts @@ -1,4 +1,5 @@ -import { join } from "node:path"; +import { mkdirSync } from "node:fs"; +import { dirname, join } from "node:path"; import { promisify } from "node:util"; import sqlite3 from "sqlite3"; @@ -19,6 +20,15 @@ export class MappingDatabase { constructor(dbPath: string) { // Ensure the directory exists const fullPath = join(dbPath, "mappings.db"); + const dbDir = dirname(fullPath); + try { + mkdirSync(dbDir, { recursive: true }); + } catch (error) { + // Directory might already exist, which is fine + if ((error as NodeJS.ErrnoException).code !== "EEXIST") { + throw error; + } + } this.db = new sqlite3.Database(fullPath); // Promisify database methods diff --git a/neo4j-compose.yaml b/neo4j-compose.yaml new file mode 100644 index 00000000..5a959e3b --- /dev/null +++ b/neo4j-compose.yaml @@ -0,0 +1,21 @@ +services: + neo4j: + image: neo4j:5.15 + container_name: metastate-neo4j + ports: + - "7474:7474" # HTTP + - "7687:7687" # Bolt + environment: + - NEO4J_AUTH=${NEO4J_USER:-neo4j}/${NEO4J_PASSWORD:-neo4j} + volumes: + - neo4j_data:/data + restart: unless-stopped + healthcheck: + test: [ "CMD-SHELL", "cypher-shell -u neo4j -p ${NEO4J_PASSWORD:-neo4j} 'RETURN 1' || exit 1" ] + interval: 10s + timeout: 5s + retries: 5 + +volumes: + neo4j_data: + diff --git a/package.json b/package.json index 4ea8df2d..ca1c1549 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,11 @@ "check-types": "turbo run check-types", "dev:docker": "docker compose -f dev-docker-compose.yaml up --watch", "dev:docker:down": "docker compose -f dev-docker-compose.yaml down", - "dev:docker:all": "docker compose -f dev-docker-compose.yaml --profile all up --watch" + "dev:docker:all": "docker compose -f dev-docker-compose.yaml --profile all up --watch", + "dev:docker:socials": "docker compose -f dev-docker-compose.yaml --profile socials up --watch", + "dev:docker:charter-blabsy": "docker compose -f dev-docker-compose.yaml --profile charter-blabsy up --watch", + "dev:docker:neo4j": "docker compose -f neo4j-compose.yaml up", + "dev:docker:neo4j:down": "docker compose -f neo4j-compose.yaml down" }, "devDependencies": { "@biomejs/biome": "^1.9.4", diff --git a/platforms/blabsy-w3ds-auth-api/dev.sh b/platforms/blabsy-w3ds-auth-api/dev.sh new file mode 100644 index 00000000..1c592615 --- /dev/null +++ b/platforms/blabsy-w3ds-auth-api/dev.sh @@ -0,0 +1,20 @@ +#!/bin/sh +set -e + +# Initial build +pnpm build + +# Start TypeScript compiler in watch mode in background +tsc --watch & +TSC_PID=$! + +# Start nodemon watching dist +nodemon --watch dist dist/index.js & +NODEMON_PID=$! 
+ +# Trap signals to kill background processes +trap 'kill $TSC_PID $NODEMON_PID 2>/dev/null; exit' INT TERM + +# Wait for either process to exit +wait $TSC_PID $NODEMON_PID + diff --git a/platforms/blabsy-w3ds-auth-api/src/controllers/WebhookController.ts b/platforms/blabsy-w3ds-auth-api/src/controllers/WebhookController.ts index c5f9aa73..d9bb7784 100644 --- a/platforms/blabsy-w3ds-auth-api/src/controllers/WebhookController.ts +++ b/platforms/blabsy-w3ds-auth-api/src/controllers/WebhookController.ts @@ -157,6 +157,39 @@ export class WebhookController { const mappedData = await this.mapDataToFirebase(tableName, data); if (tableName === "users") { docRef = collection.doc(data.ename); + } else if (tableName === "chats") { + // Check for existing DM (2 participants, no name) before creating + const participants = mappedData.participants || []; + const isDM = participants.length === 2 && !mappedData.name; + + if (isDM) { + // Query for existing chats with these participants + const existingChatsQuery = collection.where('participants', 'array-contains', participants[0]); + const existingChatsSnapshot = await existingChatsQuery.get(); + + for (const doc of existingChatsSnapshot.docs) { + const chat = doc.data(); + // Check if it's a direct chat (2 participants) with same participants + if ( + chat.participants && + chat.participants.length === 2 && + chat.participants.includes(participants[0]) && + chat.participants.includes(participants[1]) + ) { + // Use existing chat and store mapping + docRef = collection.doc(doc.id); + adapter.addToLockedIds(docRef.id); + adapter.addToLockedIds(globalId); + await adapter.mappingDb.storeMapping({ + globalId: globalId, + localId: docRef.id, + }); + return; // Exit early, don't create new chat + } + } + } + // No existing DM found or it's a group chat - create new + docRef = collection.doc(); } else { // Use auto-generated ID for other tables docRef = collection.doc(); diff --git a/platforms/blabsy-w3ds-auth-api/src/index.ts b/platforms/blabsy-w3ds-auth-api/src/index.ts index 9e3606d3..b21e7adc 100644 --- a/platforms/blabsy-w3ds-auth-api/src/index.ts +++ b/platforms/blabsy-w3ds-auth-api/src/index.ts @@ -26,9 +26,19 @@ app.use(express.urlencoded({ limit: "50mb", extended: true })); const authController = new AuthController(); -initializeApp({ - credential: applicationDefault(), -}); +// Initialize Firebase Admin SDK (only if credentials are available) +try { + if (process.env.GOOGLE_APPLICATION_CREDENTIALS || process.env.FIREBASE_CREDENTIALS_PATH) { + initializeApp({ + credential: applicationDefault(), + }); + } else { + console.warn("⚠️ Firebase credentials not configured. 
Firebase features will be disabled."); + } +} catch (error: any) { + console.warn("⚠️ Failed to initialize Firebase Admin SDK:", error.message); + console.warn("⚠️ Firebase features will be disabled."); +} // Initialize Web3Adapter const web3Adapter = new Web3Adapter(); diff --git a/platforms/blabsy-w3ds-auth-api/src/web3adapter/watchers/firestoreWatcher.ts b/platforms/blabsy-w3ds-auth-api/src/web3adapter/watchers/firestoreWatcher.ts index ca2a93ff..d3fc1a9a 100644 --- a/platforms/blabsy-w3ds-auth-api/src/web3adapter/watchers/firestoreWatcher.ts +++ b/platforms/blabsy-w3ds-auth-api/src/web3adapter/watchers/firestoreWatcher.ts @@ -15,10 +15,10 @@ export class FirestoreWatcher { private unsubscribe: (() => void) | null = null; private adapter = adapter; private db: FirebaseFirestore.Firestore; - private isProcessing = false; private retryCount = 0; - private readonly maxRetries: number = 3; + private readonly maxRetries: number = 10; // Increased retries private readonly retryDelay: number = 1000; // 1 second + private isFirstSnapshot = true; // Skip the initial snapshot that contains all existing documents // Track processed document IDs to prevent duplicates private processedIds = new Set(); @@ -26,6 +26,20 @@ export class FirestoreWatcher { // Clean up old processed IDs periodically to prevent memory leaks private cleanupInterval: NodeJS.Timeout | null = null; + + // Connection health monitoring + private lastSnapshotTime: number = Date.now(); + private healthCheckInterval: NodeJS.Timeout | null = null; + private readonly healthCheckIntervalMs = 60000; // 1 minute + private readonly maxTimeWithoutSnapshot = 120000; // 2 minutes - if no snapshot in 2 min, reconnect + + // Reconnection policy + private currentAttempt = 0; + private readonly maxAttempts = 20; // Maximum reconnection attempts + private readonly baseDelay = 1000; // Base delay in ms + private readonly maxDelay = 60000; // Maximum delay cap (60 seconds) + private reconnectTimeoutId: NodeJS.Timeout | null = null; + private stopped = false; // Flag to stop reconnection attempts constructor( private readonly collection: @@ -41,31 +55,32 @@ export class FirestoreWatcher { ? this.collection.path : "collection group"; - try { - // First, get all existing documents - const snapshot = await this.collection.get(); - await this.processSnapshot(snapshot); + // Reset stopped flag when starting + this.stopped = false; - // Then set up real-time listener + try { + // Set up real-time listener (only for new changes, not existing documents) this.unsubscribe = this.collection.onSnapshot( async (snapshot) => { - if (this.isProcessing) { - console.log( - "Still processing previous snapshot, skipping..." 
- ); + // Update last snapshot time for health monitoring + this.lastSnapshotTime = Date.now(); + + // Skip the first snapshot which contains all existing documents + if (this.isFirstSnapshot) { + console.log(`Skipping initial snapshot for ${collectionPath} (contains all existing documents)`); + this.isFirstSnapshot = false; return; } - - try { - this.isProcessing = true; - await this.processSnapshot(snapshot); - this.retryCount = 0; // Reset retry count on success - } catch (error) { + + // Don't skip snapshots - queue them instead to handle large databases + // Process snapshot asynchronously without blocking new snapshots + this.processSnapshot(snapshot).catch((error) => { console.error("Error processing snapshot:", error); - await this.handleError(error); - } finally { - this.isProcessing = false; - } + this.handleError(error); + }); + + // Reset retry count on successful snapshot receipt + this.retryCount = 0; }, (error) => { console.error("Error in Firestore listener:", error); @@ -77,6 +92,9 @@ export class FirestoreWatcher { // Start cleanup interval to prevent memory leaks this.startCleanupInterval(); + + // Start health check to detect silent disconnects + this.startHealthCheck(); } catch (error) { console.error( `Failed to start watcher for ${collectionPath}:`, @@ -93,6 +111,9 @@ export class FirestoreWatcher { : "collection group"; console.log(`Stopping watcher for collection: ${collectionPath}`); + // Set stopped flag to prevent new reconnection attempts + this.stopped = true; + if (this.unsubscribe) { this.unsubscribe(); this.unsubscribe = null; @@ -104,6 +125,18 @@ export class FirestoreWatcher { clearInterval(this.cleanupInterval); this.cleanupInterval = null; } + + // Stop health check + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval); + this.healthCheckInterval = null; + } + + // Clear any pending reconnect timeout + if (this.reconnectTimeoutId) { + clearTimeout(this.reconnectTimeoutId); + this.reconnectTimeoutId = null; + } } private startCleanupInterval(): void { @@ -116,6 +149,130 @@ export class FirestoreWatcher { }, 5 * 60 * 1000); // 5 minutes } + private startHealthCheck(): void { + // Check connection health periodically + this.healthCheckInterval = setInterval(() => { + const timeSinceLastSnapshot = Date.now() - this.lastSnapshotTime; + const collectionPath = + this.collection instanceof CollectionReference + ? this.collection.path + : "collection group"; + + if (timeSinceLastSnapshot > this.maxTimeWithoutSnapshot) { + console.warn( + `⚠️ Health check failed for ${collectionPath}: No snapshot received in ${timeSinceLastSnapshot}ms. Reconnecting...` + ); + // Silently reconnect - don't increment retry count for health checks + // Use async IIFE to properly await and handle errors + (async () => { + try { + await this.reconnect(); + } catch (error) { + console.error(`Error during health-check reconnect for ${collectionPath}:`, error); + } + })(); + } + }, this.healthCheckIntervalMs); + } + + private async reconnect(): Promise { + const collectionPath = + this.collection instanceof CollectionReference + ? 
this.collection.path + : "collection group"; + + console.log(`Reconnecting watcher for ${collectionPath}...`); + + // Clear existing intervals before restarting + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval); + this.healthCheckInterval = null; + } + if (this.cleanupInterval) { + clearInterval(this.cleanupInterval); + this.cleanupInterval = null; + } + + // Clear any pending reconnect timeout + if (this.reconnectTimeoutId) { + clearTimeout(this.reconnectTimeoutId); + this.reconnectTimeoutId = null; + } + + // Clean up old listener + if (this.unsubscribe) { + this.unsubscribe(); + this.unsubscribe = null; + } + + // Reset first snapshot flag + this.isFirstSnapshot = true; + this.lastSnapshotTime = Date.now(); + + // Reset reconnection attempt counter on successful reconnect + this.currentAttempt = 0; + + // Restart the listener + try { + await this.start(); + } catch (error) { + console.error(`Failed to reconnect watcher for ${collectionPath}:`, error); + // Schedule retry with exponential backoff + this.scheduleReconnect(); + } + } + + /** + * Schedules a reconnection attempt with exponential backoff + */ + private scheduleReconnect(): void { + if (this.stopped) { + console.error("Watcher is stopped, not scheduling reconnect"); + return; + } + + if (this.currentAttempt >= this.maxAttempts) { + console.error(`Max reconnection attempts (${this.maxAttempts}) reached. Stopping reconnection attempts.`); + this.stopped = true; + return; + } + + // Clear any existing timeout + if (this.reconnectTimeoutId) { + clearTimeout(this.reconnectTimeoutId); + this.reconnectTimeoutId = null; + } + + this.currentAttempt++; + + // Calculate exponential backoff with jitter + const exponentialDelay = Math.min( + this.baseDelay * Math.pow(2, this.currentAttempt - 1), + this.maxDelay + ); + // Add jitter: ±20% of the delay + const jitter = exponentialDelay * 0.2 * (Math.random() * 2 - 1); + const delay = Math.floor(exponentialDelay + jitter); + + const collectionPath = + this.collection instanceof CollectionReference + ? this.collection.path + : "collection group"; + + console.log(`Scheduling reconnect attempt ${this.currentAttempt}/${this.maxAttempts} for ${collectionPath} in ${delay}ms`); + + this.reconnectTimeoutId = setTimeout(async () => { + this.reconnectTimeoutId = null; + try { + await this.reconnect(); + } catch (error) { + console.error(`Error during scheduled reconnect for ${collectionPath}:`, error); + // Schedule another attempt + this.scheduleReconnect(); + } + }, delay); + } + // Method to manually clear processed IDs (useful for debugging) clearProcessedIds(): void { const beforeSize = this.processedIds.size; @@ -132,16 +289,57 @@ export class FirestoreWatcher { } private async handleError(error: any): Promise { + const collectionPath = + this.collection instanceof CollectionReference + ? 
this.collection.path + : "collection group"; + + // Clear existing intervals before restarting + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval); + this.healthCheckInterval = null; + } + if (this.cleanupInterval) { + clearInterval(this.cleanupInterval); + this.cleanupInterval = null; + } + + // Clear any pending reconnect timeout + if (this.reconnectTimeoutId) { + clearTimeout(this.reconnectTimeoutId); + this.reconnectTimeoutId = null; + } + if (this.retryCount < this.maxRetries) { this.retryCount++; - console.log(`Retrying (${this.retryCount}/${this.maxRetries})...`); + console.log(`Retrying (${this.retryCount}/${this.maxRetries}) for ${collectionPath}...`); await new Promise((resolve) => setTimeout(resolve, this.retryDelay * this.retryCount) ); - await this.start(); + + // Clean up old listener before restarting + if (this.unsubscribe) { + this.unsubscribe(); + this.unsubscribe = null; + } + + // Reset first snapshot flag when restarting + this.isFirstSnapshot = true; + this.lastSnapshotTime = Date.now(); + + try { + await this.start(); + } catch (restartError) { + console.error(`Failed to restart watcher for ${collectionPath}:`, restartError); + // Continue retrying + this.handleError(restartError); + } } else { - console.error("Max retries reached, stopping watcher"); - await this.stop(); + console.error(`Max retries reached for ${collectionPath}, but continuing to retry...`); + // Instead of stopping, reset retry count and keep trying + this.retryCount = 0; + await new Promise((resolve) => setTimeout(resolve, this.retryDelay * 5)); + await this.reconnect(); } } @@ -155,7 +353,8 @@ export class FirestoreWatcher { `Processing ${changes.length} changes in ${collectionPath}` ); - for (const change of changes) { + // Process all changes in parallel immediately (no batching) + const processPromises = changes.map(async (change) => { const doc = change.doc; const docId = doc.id; const data = doc.data(); @@ -167,25 +366,28 @@ export class FirestoreWatcher { // Check if already processed or currently processing if (this.processedIds.has(docId) || this.processingIds.has(docId)) { console.log(`${collectionPath} - skipping duplicate/processing - ${docId}`); - continue; + return; } // Check if locked in adapter if (adapter.lockedIds.includes(docId)) { console.log(`${collectionPath} - skipping locked - ${docId}`); - continue; + return; } // Mark as currently processing this.processingIds.add(docId); - // Process immediately without setTimeout to prevent race conditions - console.log(`${collectionPath} - processing - ${docId}`); - await this.handleCreateOrUpdate(doc, data); - - // Mark as processed and remove from processing - this.processedIds.add(docId); - this.processingIds.delete(docId); + try { + // Process immediately + console.log(`${collectionPath} - processing - ${docId}`); + await this.handleCreateOrUpdate(doc, data); + + // Mark as processed + this.processedIds.add(docId); + } finally { + this.processingIds.delete(docId); + } break; case "removed": @@ -204,7 +406,10 @@ export class FirestoreWatcher { this.processingIds.delete(docId); // Continue processing other changes even if one fails } - } + }); + + // Process all changes in parallel + await Promise.all(processPromises); } private async handleCreateOrUpdate( diff --git a/platforms/blabsy/src/components/aside/aside-trends.tsx b/platforms/blabsy/src/components/aside/aside-trends.tsx index ff776d0d..23fdb463 100644 --- a/platforms/blabsy/src/components/aside/aside-trends.tsx +++ 
b/platforms/blabsy/src/components/aside/aside-trends.tsx @@ -53,48 +53,47 @@ export function AsideTrends({ inTrendsPage }: AsideTrendsProps) { )} {trends.map(({ name, query, tweet_volume, url }) => ( - - -
- -
-

- Trending{' '} - {location === 'Worldwide' - ? 'Worldwide' - : `in ${location as string}`} -

-

{name}

-

- {formatNumber(tweet_volume)} tweets -

-
+ iconName='EllipsisHorizontalIcon' + /> + + + +

+ Trending{' '} + {location === 'Worldwide' + ? 'Worldwide' + : `in ${location as string}`} +

+

{name}

+

+ {formatNumber(tweet_volume)} tweets +

))} {!inTrendsPage && ( - - - Show more - + > + Show more )} diff --git a/platforms/blabsy/src/components/aside/suggestions.tsx b/platforms/blabsy/src/components/aside/suggestions.tsx index 3895d972..0ad959c1 100644 --- a/platforms/blabsy/src/components/aside/suggestions.tsx +++ b/platforms/blabsy/src/components/aside/suggestions.tsx @@ -47,13 +47,12 @@ export function Suggestions(): JSX.Element { {suggestionsData?.map((userData) => ( ))} - - - Show more - + > + Show more ) : ( diff --git a/platforms/blabsy/src/components/input/input.tsx b/platforms/blabsy/src/components/input/input.tsx index 256f50a8..5e5aacfb 100644 --- a/platforms/blabsy/src/components/input/input.tsx +++ b/platforms/blabsy/src/components/input/input.tsx @@ -115,8 +115,11 @@ export function Input({ () => ( Your Blab was sent - - View + + View ), @@ -218,16 +221,17 @@ export function Input({ /> )} {children} - {reply && visited && ( + {reply && visited && parent?.username && ( Replying to{' '} - - - {parent?.username as string} - + + {parent.username} )} diff --git a/platforms/blabsy/src/components/modal/mobile-sidebar-modal.tsx b/platforms/blabsy/src/components/modal/mobile-sidebar-modal.tsx index 22b9f1e7..42342093 100644 --- a/platforms/blabsy/src/components/modal/mobile-sidebar-modal.tsx +++ b/platforms/blabsy/src/components/modal/mobile-sidebar-modal.tsx @@ -100,7 +100,7 @@ export function MobileSidebarModal({ ['followers', 'Followers', followers.length] ]; - const userLink = `/user/${username}`; + const userLink = username ? `/user/${username}` : null; return ( <> @@ -136,8 +136,11 @@ export function MobileSidebarModal({ action={closeModal} />
- - + {username && userLink ? ( + {coverPhotoURL ? ( )} - - + + ) : ( +
+ {coverPhotoURL ? ( + + ) : ( +
+ )} +
+ )}
- {allStats.map(([id, label, stat]) => ( - - + userLink ? ( + {label}

-
- - ))} + + ) : ( +
+

{stat}

+

+ {label} +

+
+ ) + )}
+ ); + } + + return ( + + + + +
+
+
+ + + +
+ + + + {follow && } +
+
+ +
+ {follow && bio &&

{bio}

} +
); } diff --git a/platforms/blabsy/src/components/user/user-details.tsx b/platforms/blabsy/src/components/user/user-details.tsx index e019c56f..9b9a0781 100644 --- a/platforms/blabsy/src/components/user/user-details.tsx +++ b/platforms/blabsy/src/components/user/user-details.tsx @@ -47,6 +47,7 @@ export function UserDetails({ diff --git a/platforms/blabsy/src/components/user/user-follow-stats.tsx b/platforms/blabsy/src/components/user/user-follow-stats.tsx index 432b3aab..521de9e8 100644 --- a/platforms/blabsy/src/components/user/user-follow-stats.tsx +++ b/platforms/blabsy/src/components/user/user-follow-stats.tsx @@ -42,7 +42,29 @@ export function UserFollowStats({ query: { id } } = useRouter(); - const userPath = `/user/${id as string}`; + if (!id || typeof id !== 'string') { + // If id is missing, render non-clickable stats + return ( +
+ {[ + ['Following', currentFollowing], + ['Follower', currentFollowers] + ].map(([title, stats], index) => ( +
+

+ {stats} +

+

{index === 1 && stats > 1 ? `${title}s` : title}

+
+ ))} +
+ ); + } + + const userPath = `/user/${id}`; const allStats: Readonly = [ ['Following', `${userPath}/following`, followingMove, currentFollowing], @@ -53,23 +75,21 @@ export function UserFollowStats({
a>div]:text-dark-primary + [&>*>div]:font-bold [&>*>div]:text-light-primary + dark:[&>*>div]:text-dark-primary' > {allStats.map(([title, link, move, stats], index) => ( - - - -

{index === 1 && stats > 1 ? `${title}s` : title}

-
+ > + +

{index === 1 && stats > 1 ? `${title}s` : title}

))}
diff --git a/platforms/blabsy/src/components/user/user-name.tsx b/platforms/blabsy/src/components/user/user-name.tsx index 0da72c03..6c57670c 100644 --- a/platforms/blabsy/src/components/user/user-name.tsx +++ b/platforms/blabsy/src/components/user/user-name.tsx @@ -21,30 +21,47 @@ export function UserName({ }: UserNameProps): JSX.Element { const CustomTag = tag ? tag : 'p'; - return ( - - + {name} + {verified && ( + + + + )} + + ); + + if (!username) { + return ( + + ); + } + + return ( + + {content} ); } diff --git a/platforms/blabsy/src/components/user/user-nav-link.tsx b/platforms/blabsy/src/components/user/user-nav-link.tsx index 06e0ad91..baed3e51 100644 --- a/platforms/blabsy/src/components/user/user-nav-link.tsx +++ b/platforms/blabsy/src/components/user/user-nav-link.tsx @@ -13,28 +13,42 @@ export function UserNavLink({ name, path }: UserNavLinkProps): JSX.Element { query: { id } } = useRouter(); - const userPath = `/user/${id as string}${path ? `/${path}` : ''}`; - - return ( - - + if (!id || typeof id !== 'string') { + // If id is missing, render non-clickable nav item + return ( + + ); + } + + const userPath = `/user/${id}${path ? `/${path}` : ''}`; + + return ( + +
+

i]:scale-100 [&>i]:opacity-100' + : 'text-light-secondary dark:text-dark-secondary' + )} + > + {name} + +

+
); } diff --git a/platforms/blabsy/src/components/user/user-tooltip.tsx b/platforms/blabsy/src/components/user/user-tooltip.tsx index 80ae0efb..f6b382f5 100644 --- a/platforms/blabsy/src/components/user/user-tooltip.tsx +++ b/platforms/blabsy/src/components/user/user-tooltip.tsx @@ -47,6 +47,11 @@ export function UserTooltip({ if (isMobile || modal) return <>{children}; + if (!username) { + // If username is missing, just render children without tooltip + return <>{children}; + } + const userLink = `/user/${username}`; const allStats: Readonly = [ @@ -71,17 +76,15 @@ export function UserTooltip({
{coverPhotoURL ? ( - - - - + + ) : (
@@ -120,17 +123,17 @@ export function UserTooltip({ {bio &&

{bio}

}
{allStats.map(([id, label, stat]) => ( - - -

{stat}

-

- {label} -

-
+ > +

{stat}

+

+ {label} +

))}
diff --git a/platforms/blabsy/src/components/user/user-username.tsx b/platforms/blabsy/src/components/user/user-username.tsx index 97616240..ec9c2f02 100644 --- a/platforms/blabsy/src/components/user/user-username.tsx +++ b/platforms/blabsy/src/components/user/user-username.tsx @@ -2,7 +2,7 @@ import Link from 'next/link'; import cn from 'clsx'; type UserUsernameProps = { - username: string; + username?: string | null; className?: string; disableLink?: boolean; }; @@ -12,18 +12,31 @@ export function UserUsername({ className, disableLink }: UserUsernameProps): JSX.Element { - return ( - - - @{username} - + @user + + ); + } + + return ( + + @{username} ); } diff --git a/platforms/blabsy/src/components/view/view-tweet.tsx b/platforms/blabsy/src/components/view/view-tweet.tsx index a806d8ed..b2422eed 100644 --- a/platforms/blabsy/src/components/view/view-tweet.tsx +++ b/platforms/blabsy/src/components/view/view-tweet.tsx @@ -121,10 +121,11 @@ export function ViewTweet(tweet: ViewTweetProps): JSX.Element { {reply && (

Replying to{' '} - - - @{parentUsername} - + + @{parentUsername}

)} diff --git a/platforms/blabsy/src/lib/firebase/utils.ts b/platforms/blabsy/src/lib/firebase/utils.ts index 8e233764..6e5eda1d 100644 --- a/platforms/blabsy/src/lib/firebase/utils.ts +++ b/platforms/blabsy/src/lib/firebase/utils.ts @@ -392,6 +392,33 @@ export async function createChat( owner?: string, description?: string ): Promise { + // Check for existing DM (2 participants, no name) before creating + const isDM = participants.length === 2 && !name; + + if (isDM) { + // Check if a direct chat already exists between these users + const existingChatsQuery = query( + chatsCollection, + where('participants', 'array-contains', participants[0]) + ); + + const existingChats = await getDocs(existingChatsQuery); + + for (const doc of existingChats.docs) { + const chat = doc.data(); + // Check if it's a direct chat (2 participants) and includes both participants + if ( + chat.participants && + chat.participants.length === 2 && + chat.participants.includes(participants[0]) && + chat.participants.includes(participants[1]) + ) { + return doc.id; // Return existing chat ID + } + } + } + + // No existing DM found or it's a group chat - create new const chatRef = doc(chatsCollection); // Derive type from participant count diff --git a/platforms/dreamsync-api/src/controllers/AuthController.ts b/platforms/dreamsync-api/src/controllers/AuthController.ts index a739d19f..05742e5b 100644 --- a/platforms/dreamsync-api/src/controllers/AuthController.ts +++ b/platforms/dreamsync-api/src/controllers/AuthController.ts @@ -63,8 +63,8 @@ export class AuthController { return res.status(400).json({ error: "session is required" }); } - // Only find existing users - don't create new ones during auth - const user = await this.userService.findUser(ename); + // Find user by ename (handles @ symbol variations) + const user = await this.userService.findByEname(ename); if (!user) { // User doesn't exist - they need to be created via webhook first diff --git a/platforms/dreamsync-api/src/services/UserService.ts b/platforms/dreamsync-api/src/services/UserService.ts index 08a1225e..1b07911b 100644 --- a/platforms/dreamsync-api/src/services/UserService.ts +++ b/platforms/dreamsync-api/src/services/UserService.ts @@ -18,6 +18,28 @@ export class UserService { }); } + /** + * Find a user by ename, regardless of whether the ename is stored with or without @ symbol + * @param ename - The ename to search for (with or without @ prefix) + * @returns The user if found, null otherwise + */ + async findByEname(ename: string): Promise { + // Normalize the input: remove @ if present for comparison + const normalizedEname = ename.startsWith('@') ? 
ename.slice(1) : ename; + const enameWithAt = `@${normalizedEname}`; + + // Search for user where ename matches either with or without @ + const user = await this.userRepository + .createQueryBuilder("user") + .where("user.ename = :enameWithAt OR user.ename = :enameWithoutAt", { + enameWithAt, + enameWithoutAt: normalizedEname, + }) + .getOne(); + + return user; + } + async getAllUsers(): Promise { return this.userRepository.find(); } @@ -37,7 +59,7 @@ export class UserService { async findUser(ename: string): Promise { // Only find user, don't create - users should only be created via webhooks - return this.getUserByEname(ename); + return this.findByEname(ename); } async updateUser(id: string, updates: Partial): Promise { diff --git a/platforms/eReputation/vite.config.ts b/platforms/eReputation/vite.config.ts index 58097441..e7698701 100644 --- a/platforms/eReputation/vite.config.ts +++ b/platforms/eReputation/vite.config.ts @@ -1,8 +1,11 @@ import { defineConfig } from "vite"; import react from "@vitejs/plugin-react"; import path from "path"; +import { fileURLToPath } from "url"; import runtimeErrorOverlay from "@replit/vite-plugin-runtime-error-modal"; +const __dirname = path.dirname(fileURLToPath(import.meta.url)); + const plugins: any[] = [ react(), runtimeErrorOverlay(), @@ -21,14 +24,14 @@ export default defineConfig({ plugins, resolve: { alias: { - "@": path.resolve(import.meta.dirname, "client", "src"), - "@shared": path.resolve(import.meta.dirname, "shared"), - "@assets": path.resolve(import.meta.dirname, "attached_assets"), + "@": path.resolve(__dirname, "client", "src"), + "@shared": path.resolve(__dirname, "shared"), + "@assets": path.resolve(__dirname, "attached_assets"), }, }, - root: path.resolve(import.meta.dirname, "client"), + root: path.resolve(__dirname, "client"), build: { - outDir: path.resolve(import.meta.dirname, "dist/public"), + outDir: path.resolve(__dirname, "dist/public"), emptyOutDir: true, }, server: { diff --git a/platforms/evoting-api/src/controllers/AuthController.ts b/platforms/evoting-api/src/controllers/AuthController.ts index 35666e8e..8bc1d15c 100644 --- a/platforms/evoting-api/src/controllers/AuthController.ts +++ b/platforms/evoting-api/src/controllers/AuthController.ts @@ -59,8 +59,8 @@ export class AuthController { return res.status(400).json({ error: "ename is required" }); } - // Only find existing users - don't create new ones during auth - const user = await this.userService.findUser(ename); + // Find user by ename (handles @ symbol variations) + const user = await this.userService.findByEname(ename); if (!user) { // User doesn't exist - they need to be created via webhook first diff --git a/platforms/evoting-api/src/services/UserService.ts b/platforms/evoting-api/src/services/UserService.ts index 4714cd74..26592ed5 100644 --- a/platforms/evoting-api/src/services/UserService.ts +++ b/platforms/evoting-api/src/services/UserService.ts @@ -21,6 +21,30 @@ export class UserService { }); } + /** + * Find a user by ename, regardless of whether the ename is stored with or without @ symbol + * @param ename - The ename to search for (with or without @ prefix) + * @returns The user if found, null otherwise + */ + async findByEname(ename: string): Promise { + // Normalize the input: remove @ if present for comparison + const normalizedEname = ename.startsWith('@') ? 
ename.slice(1) : ename; + const enameWithAt = `@${normalizedEname}`; + + // Search for user where ename matches either with or without @ + const user = await this.userRepository + .createQueryBuilder("user") + .leftJoinAndSelect("user.polls", "polls") + .leftJoinAndSelect("user.votes", "votes") + .where("user.ename = :enameWithAt OR user.ename = :enameWithoutAt", { + enameWithAt, + enameWithoutAt: normalizedEname, + }) + .getOne(); + + return user; + } + async getAllUsers(): Promise { return this.userRepository.find({ relations: ["polls", "votes"], @@ -42,7 +66,7 @@ export class UserService { async findUser(ename: string): Promise { // Only find user, don't create - users should only be created via webhooks - return this.getUserByEname(ename); + return this.findByEname(ename); } async updateUser(id: string, updates: Partial): Promise { diff --git a/platforms/evoting-api/src/services/VoteService.ts b/platforms/evoting-api/src/services/VoteService.ts index c366b197..bdfc0ab0 100644 --- a/platforms/evoting-api/src/services/VoteService.ts +++ b/platforms/evoting-api/src/services/VoteService.ts @@ -575,7 +575,7 @@ export class VoteService { voteCount: electionResult.optionResults[`option_${index}`] || 0 })); - const totalVoteCount = Object.values(electionResult.optionResults).reduce((sum, count) => sum + count, 0); + const totalVoteCount = Object.values(electionResult.optionResults).reduce((sum: number, count: number) => sum + count, 0); // Get group member count for voting turnout calculation let totalEligibleVoters = 0; @@ -694,11 +694,14 @@ export class VoteService { const commitments: Record = {}; const anchors: Record = {}; - for (const [optionId, commitment] of Object.entries(voteData.commitments)) { + const voteCommitments = voteData.commitments as Record; + const voteAnchors = voteData.anchors as Record; + + for (const [optionId, commitment] of Object.entries(voteCommitments)) { commitments[optionId] = Array.from(commitment).map(b => b.toString(16).padStart(2, '0')).join(''); } - for (const [optionId, anchor] of Object.entries(voteData.anchors)) { + for (const [optionId, anchor] of Object.entries(voteAnchors)) { anchors[optionId] = Array.from(anchor).map(b => b.toString(16).padStart(2, '0')).join(''); } diff --git a/platforms/group-charter-manager-api/src/services/UserService.ts b/platforms/group-charter-manager-api/src/services/UserService.ts index 8585b1f4..94a1d4e0 100644 --- a/platforms/group-charter-manager-api/src/services/UserService.ts +++ b/platforms/group-charter-manager-api/src/services/UserService.ts @@ -21,54 +21,42 @@ export class UserService { return await this.userRepository.save(user); } + /** + * Find a user by ename, regardless of whether the ename is stored with or without @ symbol + * @param ename - The ename to search for (with or without @ prefix) + * @returns The user if found, null otherwise + */ + async findByEname(ename: string): Promise { + // Normalize the input: remove @ if present for comparison + const normalizedEname = ename.startsWith('@') ? 
ename.slice(1) : ename; + const enameWithAt = `@${normalizedEname}`; + + // Search for user where ename matches either with or without @ + const user = await this.userRepository + .createQueryBuilder("user") + .leftJoinAndSelect("user.followers", "followers") + .leftJoinAndSelect("user.following", "following") + .where("user.ename = :enameWithAt OR user.ename = :enameWithoutAt", { + enameWithAt, + enameWithoutAt: normalizedEname, + }) + .getOne(); + + return user; + } + async findUserByEname( ename: string ): Promise<{ user: User; token: string }> { - let user: User | null = null; - - console.log(`🔍 Looking for user with ename: '${ename}'`); - - // Try to find user with the exact ename as provided - user = await this.userRepository.findOne({ - where: { ename: ename }, - }); - - if (user) { - console.log(`✅ Found user with exact ename: '${ename}'`); - } else { - // If not found and ename starts with @, try without @ - if (ename.startsWith('@')) { - const enameWithoutAt = ename.slice(1); - console.log(`🔍 Trying without @ prefix: '${enameWithoutAt}'`); - user = await this.userRepository.findOne({ - where: { ename: enameWithoutAt }, - }); - if (user) { - console.log(`✅ Found user without @ prefix: '${enameWithoutAt}'`); - } - } - - // If not found and ename doesn't start with @, try with @ - if (!user && !ename.startsWith('@')) { - const enameWithAt = `@${ename}`; - console.log(`🔍 Trying with @ prefix: '${enameWithAt}'`); - user = await this.userRepository.findOne({ - where: { ename: enameWithAt }, - }); - if (user) { - console.log(`✅ Found user with @ prefix: '${enameWithAt}'`); - } - } - } + // Find user by ename (handles @ symbol variations) + const user = await this.findByEname(ename); // If still no user found, throw an error - never create new users if (!user) { - console.log(`❌ No user found for ename: '${ename}' (tried with/without @ prefix)`); throw new Error(`User with ename '${ename}' not found. 
Cannot create new users automatically.`); } const token = signToken({ userId: user.id }); - console.log(`🎉 Successfully authenticated user: ${user.ename} (ID: ${user.id})`); return { user, token }; } diff --git a/platforms/marketplace/vite.config.ts b/platforms/marketplace/vite.config.ts index de066baf..a2a65332 100644 --- a/platforms/marketplace/vite.config.ts +++ b/platforms/marketplace/vite.config.ts @@ -1,6 +1,9 @@ import { defineConfig } from "vite"; import react from "@vitejs/plugin-react"; import path from "path"; +import { fileURLToPath } from "url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); export default defineConfig({ plugins: [ @@ -8,13 +11,13 @@ export default defineConfig({ ], resolve: { alias: { - "@": path.resolve(import.meta.dirname, "client", "src"), - "@assets": path.resolve(import.meta.dirname, "assets"), + "@": path.resolve(__dirname, "client", "src"), + "@assets": path.resolve(__dirname, "assets"), }, }, - root: path.resolve(import.meta.dirname, "client"), + root: path.resolve(__dirname, "client"), build: { - outDir: path.resolve(import.meta.dirname, "dist/public"), + outDir: path.resolve(__dirname, "dist/public"), emptyOutDir: true, }, server: { @@ -23,5 +26,5 @@ export default defineConfig({ allow: [".."], }, }, - publicDir: path.resolve(import.meta.dirname, "assets"), + publicDir: path.resolve(__dirname, "assets"), }); diff --git a/platforms/pictique-api/src/controllers/AuthController.ts b/platforms/pictique-api/src/controllers/AuthController.ts index 2770f77d..2ca0ec97 100644 --- a/platforms/pictique-api/src/controllers/AuthController.ts +++ b/platforms/pictique-api/src/controllers/AuthController.ts @@ -2,6 +2,7 @@ import { Request, Response } from "express"; import { v4 as uuidv4 } from "uuid"; import { UserService } from "../services/UserService"; import { EventEmitter } from "events"; +import { signToken } from "../utils/jwt"; export class AuthController { private userService: UserService; private eventEmitter: EventEmitter; @@ -69,8 +70,15 @@ export class AuthController { return res.status(400).json({ error: "ename is required" }); } - const { user, token } = - await this.userService.findOrCreateUser(ename); + // Find user by ename (handles @ symbol variations) + let user = await this.userService.findByEname(ename); + + if (!user) { + throw new Error("User not found"); + } + + // Generate token + const token = signToken({ userId: user.id }); const data = { user: { diff --git a/platforms/pictique-api/src/controllers/WebhookController.ts b/platforms/pictique-api/src/controllers/WebhookController.ts index adcadfc5..903cb401 100644 --- a/platforms/pictique-api/src/controllers/WebhookController.ts +++ b/platforms/pictique-api/src/controllers/WebhookController.ts @@ -55,11 +55,19 @@ export class WebhookController { if (mapping.tableName === "users") { if (localId) { + console.log("DANGER", "user data being updated") + console.log("following user got fucked", local.data) + const user = await this.userService.findById(localId); + console.log(user) + console.log("---------------------------------------------------------------") + if (!user) throw new Error() + for (const key of Object.keys(local.data)) { // @ts-ignore - user[key] = local.data[key]; + user[key] = local.data[key] ?? 
user[key] } + user.handle = user.handle; if (!user) throw new Error(); user.name = req.body.data.displayName; await this.userService.userRepository.save(user); @@ -79,6 +87,7 @@ export class WebhookController { } user.name = req.body.data.displayName; await this.userService.userRepository.save(user); + console.log("user saved", user); await this.adapter.mappingDb.storeMapping({ localId: user.id, globalId: req.body.id, @@ -234,24 +243,53 @@ export class WebhookController { this.adapter.addToLockedIds(localId); await this.chatService.chatRepository.save(chat); } else { - const chat = await this.chatService.createChat( - local.data.name as string, - participants.map((p) => p.id) - ); - - this.adapter.addToLockedIds(chat.id); - await this.adapter.mappingDb.storeMapping({ - localId: chat.id, - globalId: req.body.id, - }); + // Check for existing DM (2 participants, no name) before creating + const participantIds = participants.map((p) => p.id); + const isDM = participantIds.length === 2 && !local.data.name; + + let chat; + if (isDM) { + const existingChat = await this.chatService.findChatByParticipants(participantIds); + if (existingChat) { + // Use existing chat and store mapping + chat = existingChat; + this.adapter.addToLockedIds(chat.id); + await this.adapter.mappingDb.storeMapping({ + localId: chat.id, + globalId: req.body.id, + }); + } else { + // Create new chat + chat = await this.chatService.createChat( + local.data.name as string, + participantIds + ); + this.adapter.addToLockedIds(chat.id); + await this.adapter.mappingDb.storeMapping({ + localId: chat.id, + globalId: req.body.id, + }); + } + } else { + // Group chat - always create new + chat = await this.chatService.createChat( + local.data.name as string, + participantIds + ); + this.adapter.addToLockedIds(chat.id); + await this.adapter.mappingDb.storeMapping({ + localId: chat.id, + globalId: req.body.id, + }); + } } } else if (mapping.tableName === "messages") { console.log("messages"); console.log(local.data); - + // Check if this is a system message const isSystemMessage = !local.data.sender || (typeof local.data.text === 'string' && local.data.text.startsWith('$$system-message$$')); - + let sender: User | null = null; if ( local.data.sender && @@ -276,7 +314,7 @@ export class WebhookController { console.log("Missing chat for system message"); return res.status(400).send(); } - + // System messages don't require a sender sender = null; } else { @@ -301,7 +339,7 @@ export class WebhookController { await this.messageService.messageRepository.save(message); } else { let message: Message; - + if (isSystemMessage) { // Create system message directly using MessageService console.log("Creating system message"); diff --git a/platforms/pictique-api/src/services/UserService.ts b/platforms/pictique-api/src/services/UserService.ts index 1034425c..7fa47b66 100644 --- a/platforms/pictique-api/src/services/UserService.ts +++ b/platforms/pictique-api/src/services/UserService.ts @@ -38,6 +38,28 @@ export class UserService { return await this.userRepository.findOneBy({ id }); } + /** + * Find a user by ename, regardless of whether the ename is stored with or without @ symbol + * @param ename - The ename to search for (with or without @ prefix) + * @returns The user if found, null otherwise + */ + async findByEname(ename: string): Promise { + // Normalize the input: remove @ if present for comparison + const normalizedEname = ename.startsWith('@') ? 
ename.slice(1) : ename; + const enameWithAt = `@${normalizedEname}`; + + // Search for user where ename matches either with or without @ + const user = await this.userRepository + .createQueryBuilder("user") + .where("user.ename = :enameWithAt OR user.ename = :enameWithoutAt", { + enameWithAt, + enameWithoutAt: normalizedEname, + }) + .getOne(); + + return user; + } + searchUsers = async ( query: string, page: number = 1, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 91f00f7e..d21b97e2 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -265,7 +265,7 @@ importers: version: 1.9.4 '@chromatic-com/storybook': specifier: ^3 - version: 3.2.6(react@18.3.1)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) + version: 3.2.6(react@19.1.0)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) '@storybook/addon-essentials': specifier: ^8.6.7 version: 8.6.14(@types/react@19.1.5)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) @@ -274,10 +274,10 @@ importers: version: 8.6.14(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) '@storybook/blocks': specifier: ^8.6.7 - version: 8.6.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) + version: 8.6.14(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) '@storybook/experimental-addon-test': specifier: ^8.6.7 - version: 8.6.14(@vitest/browser@3.1.4)(@vitest/runner@3.1.4)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3))(vitest@3.1.4) + version: 8.6.14(@vitest/browser@3.1.4)(@vitest/runner@3.1.4)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3))(vitest@3.1.4) '@storybook/svelte': specifier: ^8.6.7 version: 8.6.14(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3))(svelte@5.33.1) @@ -340,7 +340,7 @@ importers: version: 5.33.1 svelte-check: specifier: ^4.0.0 - version: 4.2.1(picomatch@4.0.2)(svelte@5.33.1)(typescript@5.6.3) + version: 4.2.1(picomatch@4.0.3)(svelte@5.33.1)(typescript@5.6.3) svelte-gestures: specifier: ^5.1.3 version: 5.1.4 @@ -2324,7 +2324,7 @@ importers: devDependencies: '@chromatic-com/storybook': specifier: ^3 - version: 3.2.6(react@19.1.0)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) + version: 3.2.6(react@18.3.1)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) '@eslint/compat': specifier: ^1.2.5 version: 1.2.9(eslint@9.27.0(jiti@2.4.2)) @@ -2345,7 +2345,7 @@ importers: version: 5.0.1(@storybook/svelte@8.6.14(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3))(svelte@5.33.1))(@sveltejs/vite-plugin-svelte@5.0.3(svelte@5.33.1)(vite@6.3.5(@types/node@24.2.0)(jiti@2.4.2)(lightningcss@1.30.1)(sass@1.89.1)(tsx@4.19.4)(yaml@2.8.0)))(babel-plugin-macros@3.1.0)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3))(svelte@5.33.1)(vite@6.3.5(@types/node@24.2.0)(jiti@2.4.2)(lightningcss@1.30.1)(sass@1.89.1)(tsx@4.19.4)(yaml@2.8.0)) '@storybook/blocks': specifier: ^8.6.12 - version: 8.6.14(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) + version: 8.6.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) '@storybook/svelte': specifier: ^8.6.12 version: 8.6.14(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3))(svelte@5.33.1) @@ -2402,7 +2402,7 @@ importers: version: 5.33.1 svelte-check: specifier: ^4.0.0 - version: 4.2.1(picomatch@4.0.3)(svelte@5.33.1)(typescript@5.8.3) + version: 4.2.1(picomatch@4.0.2)(svelte@5.33.1)(typescript@5.8.3) svelte-gestures: 
specifier: ^5.1.3 version: 5.1.4 @@ -2553,6 +2553,52 @@ importers: specifier: ^5.3.3 version: 5.8.3 + tests: + dependencies: + '@ngneat/falso': + specifier: ^7.3.0 + version: 7.3.0 + axios: + specifier: ^1.6.7 + version: 1.12.2 + cli-table3: + specifier: ^0.6.5 + version: 0.6.5 + dotenv: + specifier: ^16.4.5 + version: 16.5.0 + eventsource: + specifier: ^2.0.2 + version: 2.0.2 + firebase-admin: + specifier: ^13.4.0 + version: 13.4.0(encoding@0.1.13) + jsonwebtoken: + specifier: ^9.0.2 + version: 9.0.2 + uuid: + specifier: ^9.0.1 + version: 9.0.1 + devDependencies: + '@types/eventsource': + specifier: ^3.0.0 + version: 3.0.0 + '@types/jsonwebtoken': + specifier: ^9.0.5 + version: 9.0.9 + '@types/node': + specifier: ^20.11.24 + version: 20.16.11 + '@types/uuid': + specifier: ^9.0.8 + version: 9.0.8 + typescript: + specifier: ^5.3.3 + version: 5.8.2 + vitest: + specifier: ^2.1.0 + version: 2.1.9(@types/node@20.16.11)(jsdom@19.0.0(bufferutil@4.0.9))(lightningcss@1.30.1)(sass@1.89.1) + packages: '-@0.0.1': @@ -2909,6 +2955,10 @@ packages: '@codemirror/view@6.38.1': resolution: {integrity: sha512-RmTOkE7hRU3OVREqFVITWHz6ocgBjv08GoePscAakgVQfciA3SGCEk7mb9IzwW61cKKmlTpHXG6DUE5Ubx+MGQ==} + '@colors/colors@1.5.0': + resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} + engines: {node: '>=0.1.90'} + '@cspotcode/source-map-support@0.8.1': resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} engines: {node: '>=12'} @@ -6893,6 +6943,10 @@ packages: '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + '@types/eventsource@3.0.0': + resolution: {integrity: sha512-yEhFj31FTD29DtNeqePu+A+lD6loRef6YOM5XfN1kUwBHyy2DySGlA3jJU+FbQSkrfmlBVluf2Dub/OyReFGKA==} + deprecated: This is a stub types definition. eventsource provides its own type definitions, so you do not need this installed. 
+ '@types/express-serve-static-core@4.19.6': resolution: {integrity: sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==} @@ -7429,12 +7483,26 @@ packages: '@vitest/expect@2.0.5': resolution: {integrity: sha512-yHZtwuP7JZivj65Gxoi8upUN2OzHTi3zVfjwdpu2WrvCZPLwsJ2Ey5ILIPccoW23dd/zQBlJ4/dhi7DWNyXCpA==} + '@vitest/expect@2.1.9': + resolution: {integrity: sha512-UJCIkTBenHeKT1TTlKMJWy1laZewsRIzYighyYiJKZreqtdxSos/S1t+ktRMQWu2CKqaarrkeszJx1cgC5tGZw==} + '@vitest/expect@3.1.4': resolution: {integrity: sha512-xkD/ljeliyaClDYqHPNCiJ0plY5YIcM0OlRiZizLhlPmpXWpxnGMyTZXOHFhFeG7w9P5PBeL4IdtJ/HeQwTbQA==} '@vitest/expect@3.2.4': resolution: {integrity: sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==} + '@vitest/mocker@2.1.9': + resolution: {integrity: sha512-tVL6uJgoUdi6icpxmdrn5YNo3g3Dxv+IHJBr0GXHaEdTcw3F+cPKnsXFhli6nO+f/6SDKPHEK1UN+k+TQv0Ehg==} + peerDependencies: + msw: ^2.4.9 + vite: ^5.0.0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + '@vitest/mocker@3.1.4': resolution: {integrity: sha512-8IJ3CvwtSw/EFXqWFL8aCMu+YyYXG2WUSrQbViOZkWTKTVicVwZ/YiEZDSqD00kX+v/+W+OnxhNWoeVKorHygA==} peerDependencies: @@ -7472,12 +7540,18 @@ packages: '@vitest/runner@1.6.1': resolution: {integrity: sha512-3nSnYXkVkf3mXFfE7vVyPmi3Sazhb/2cfZGGs0JRzFsPFvAMBEcrweV1V1GsrstdXeKCTXlJbvnQwGWgEIHmOA==} + '@vitest/runner@2.1.9': + resolution: {integrity: sha512-ZXSSqTFIrzduD63btIfEyOmNcBmQvgOVsPNPe0jYtESiXkhd8u2erDLnMxmGrDCwHCCHE7hxwRDCT3pt0esT4g==} + '@vitest/runner@3.1.4': resolution: {integrity: sha512-djTeF1/vt985I/wpKVFBMWUlk/I7mb5hmD5oP8K9ACRmVXgKTae3TUOtXAEBfslNKPzUQvnKhNd34nnRSYgLNQ==} '@vitest/snapshot@1.6.1': resolution: {integrity: sha512-WvidQuWAzU2p95u8GAKlRMqMyN1yOJkGHnx3M1PL9Raf7AQ1kwLKg04ADlCa3+OXUZE7BceOhVZiuWAbzCKcUQ==} + '@vitest/snapshot@2.1.9': + resolution: {integrity: sha512-oBO82rEjsxLNJincVhLhaxxZdEtV0EFHMK5Kmx5sJ6H9L183dHECjiefOAdnqpIgT5eZwT04PoggUnW88vOBNQ==} + '@vitest/snapshot@3.1.4': resolution: {integrity: sha512-JPHf68DvuO7vilmvwdPr9TS0SuuIzHvxeaCkxYcCD4jTk67XwL45ZhEHFKIuCm8CYstgI6LZ4XbwD6ANrwMpFg==} @@ -7487,6 +7561,9 @@ packages: '@vitest/spy@2.0.5': resolution: {integrity: sha512-c/jdthAhvJdpfVuaexSrnawxZz6pywlTPe84LUB2m/4t3rl2fTo9NFGBG4oWgaD+FTgDDV8hJ/nibT7IfH3JfA==} + '@vitest/spy@2.1.9': + resolution: {integrity: sha512-E1B35FwzXXTs9FHNK6bDszs7mtydNi5MIfUWpceJ8Xbfb1gBMscAnwLbEu+B44ed6W3XjL9/ehLPHR1fkf1KLQ==} + '@vitest/spy@3.1.4': resolution: {integrity: sha512-Xg1bXhu+vtPXIodYN369M86K8shGLouNjoVI78g8iAq2rFoHFdajNvJJ5A/9bPMFcfQqdaCpOgWKEoMQg/s0Yg==} @@ -8258,6 +8335,10 @@ packages: resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + cli-table3@0.6.5: + resolution: {integrity: sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==} + engines: {node: 10.* || >= 12.*} + cli-truncate@3.1.0: resolution: {integrity: sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -9467,6 +9548,10 @@ packages: eventsource-polyfill@0.9.6: resolution: {integrity: sha512-LyMFp2oPDGhum2lMvkjqKZEwWd2/AoXyt8aoyftTBMWwPHNgU+2tdxhTHPluDxoz+z4gNj0uHAPR9nqevATMbg==} + eventsource@2.0.2: + resolution: {integrity: sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==} + engines: {node: '>=12.0.0'} + 
evp_bytestokey@1.0.3: resolution: {integrity: sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==} @@ -14379,6 +14464,11 @@ packages: engines: {node: ^18.0.0 || >=20.0.0} hasBin: true + vite-node@2.1.9: + resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + vite-node@3.1.4: resolution: {integrity: sha512-6enNwYnpyDo4hEgytbmc6mYWHXDHYEn0D1/rw4Q+tnHUGtKTJsn8T1YkX6Q18wI5LCrS8CTYlBaiCqxOy2kvUA==} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} @@ -14541,6 +14631,31 @@ packages: jsdom: optional: true + vitest@2.1.9: + resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@types/node': ^18.0.0 || >=20.0.0 + '@vitest/browser': 2.1.9 + '@vitest/ui': 2.1.9 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@types/node': + optional: true + '@vitest/browser': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + vitest@3.1.4: resolution: {integrity: sha512-Ta56rT7uWxCSJXlBtKgIlApJnT6e6IGmTYxYcmxjJ4ujuZDI59GUQgVDObXXJujOmPDBYXHK1qmaGtneu6TNIQ==} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} @@ -15374,6 +15489,9 @@ snapshots: style-mod: 4.1.2 w3c-keyname: 2.2.8 + '@colors/colors@1.5.0': + optional: true + '@cspotcode/source-map-support@0.8.1': dependencies: '@jridgewell/trace-mapping': 0.3.9 @@ -19080,10 +19198,10 @@ snapshots: dependencies: type-fest: 2.19.0 - '@storybook/experimental-addon-test@8.6.14(@vitest/browser@3.1.4)(@vitest/runner@3.1.4)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3))(vitest@3.1.4)': + '@storybook/experimental-addon-test@8.6.14(@vitest/browser@3.1.4)(@vitest/runner@3.1.4)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3))(vitest@3.1.4)': dependencies: '@storybook/global': 5.0.0 - '@storybook/icons': 1.4.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@storybook/icons': 1.4.0(react-dom@19.1.0(react@19.1.0))(react@19.1.0) '@storybook/instrumenter': 8.6.14(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) '@storybook/test': 8.6.14(storybook@8.6.14(bufferutil@4.0.9)(prettier@3.5.3)) polished: 4.3.1 @@ -20178,6 +20296,10 @@ snapshots: '@types/estree@1.0.8': {} + '@types/eventsource@3.0.0': + dependencies: + eventsource: 2.0.2 + '@types/express-serve-static-core@4.19.6': dependencies: '@types/node': 20.16.11 @@ -20973,6 +21095,13 @@ snapshots: chai: 5.2.0 tinyrainbow: 1.2.0 + '@vitest/expect@2.1.9': + dependencies: + '@vitest/spy': 2.1.9 + '@vitest/utils': 2.1.9 + chai: 5.2.0 + tinyrainbow: 1.2.0 + '@vitest/expect@3.1.4': dependencies: '@vitest/spy': 3.1.4 @@ -20988,6 +21117,14 @@ snapshots: chai: 5.2.0 tinyrainbow: 2.0.0 + '@vitest/mocker@2.1.9(vite@5.4.19(@types/node@20.16.11)(lightningcss@1.30.1)(sass@1.89.1))': + dependencies: + '@vitest/spy': 2.1.9 + estree-walker: 3.0.3 + magic-string: 0.30.17 + optionalDependencies: + vite: 5.4.19(@types/node@20.16.11)(lightningcss@1.30.1)(sass@1.89.1) + '@vitest/mocker@3.1.4(vite@6.3.5(@types/node@22.15.21)(jiti@2.4.2)(lightningcss@1.30.1)(sass@1.89.1)(tsx@4.19.4)(yaml@2.8.0))': dependencies: '@vitest/spy': 3.1.4 @@ -21034,6 +21171,11 @@ snapshots: p-limit: 5.0.0 pathe: 1.1.2 + '@vitest/runner@2.1.9': + 
dependencies: + '@vitest/utils': 2.1.9 + pathe: 1.1.2 + '@vitest/runner@3.1.4': dependencies: '@vitest/utils': 3.1.4 @@ -21045,6 +21187,12 @@ snapshots: pathe: 1.1.2 pretty-format: 29.7.0 + '@vitest/snapshot@2.1.9': + dependencies: + '@vitest/pretty-format': 2.1.9 + magic-string: 0.30.17 + pathe: 1.1.2 + '@vitest/snapshot@3.1.4': dependencies: '@vitest/pretty-format': 3.1.4 @@ -21059,6 +21207,10 @@ snapshots: dependencies: tinyspy: 3.0.2 + '@vitest/spy@2.1.9': + dependencies: + tinyspy: 3.0.2 + '@vitest/spy@3.1.4': dependencies: tinyspy: 3.0.2 @@ -21084,7 +21236,7 @@ snapshots: '@vitest/utils@2.1.9': dependencies: '@vitest/pretty-format': 2.1.9 - loupe: 3.1.3 + loupe: 3.2.0 tinyrainbow: 1.2.0 '@vitest/utils@3.1.4': @@ -22007,6 +22159,12 @@ snapshots: dependencies: restore-cursor: 4.0.0 + cli-table3@0.6.5: + dependencies: + string-width: 4.2.3 + optionalDependencies: + '@colors/colors': 1.5.0 + cli-truncate@3.1.0: dependencies: slice-ansi: 5.0.0 @@ -23692,6 +23850,8 @@ snapshots: eventsource-polyfill@0.9.6: {} + eventsource@2.0.2: {} + evp_bytestokey@1.0.3: dependencies: md5.js: 1.3.5 @@ -29061,7 +29221,7 @@ snapshots: svelte: 5.33.1 zimmerframe: 1.1.2 - svelte-check@4.2.1(picomatch@4.0.2)(svelte@5.33.1)(typescript@5.6.3): + svelte-check@4.2.1(picomatch@4.0.2)(svelte@5.33.1)(typescript@5.8.3): dependencies: '@jridgewell/trace-mapping': 0.3.25 chokidar: 4.0.3 @@ -29069,6 +29229,18 @@ snapshots: picocolors: 1.1.1 sade: 1.8.1 svelte: 5.33.1 + typescript: 5.8.3 + transitivePeerDependencies: + - picomatch + + svelte-check@4.2.1(picomatch@4.0.3)(svelte@5.33.1)(typescript@5.6.3): + dependencies: + '@jridgewell/trace-mapping': 0.3.25 + chokidar: 4.0.3 + fdir: 6.4.4(picomatch@4.0.3) + picocolors: 1.1.1 + sade: 1.8.1 + svelte: 5.33.1 typescript: 5.6.3 transitivePeerDependencies: - picomatch @@ -30278,6 +30450,24 @@ snapshots: - supports-color - terser + vite-node@2.1.9(@types/node@20.16.11)(lightningcss@1.30.1)(sass@1.89.1): + dependencies: + cac: 6.7.14 + debug: 4.4.1(supports-color@5.5.0) + es-module-lexer: 1.7.0 + pathe: 1.1.2 + vite: 5.4.19(@types/node@20.16.11)(lightningcss@1.30.1)(sass@1.89.1) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + vite-node@3.1.4(@types/node@22.15.21)(jiti@2.4.2)(lightningcss@1.30.1)(sass@1.89.1)(tsx@4.19.4)(yaml@2.8.0): dependencies: cac: 6.7.14 @@ -30437,6 +30627,42 @@ snapshots: - supports-color - terser + vitest@2.1.9(@types/node@20.16.11)(jsdom@19.0.0(bufferutil@4.0.9))(lightningcss@1.30.1)(sass@1.89.1): + dependencies: + '@vitest/expect': 2.1.9 + '@vitest/mocker': 2.1.9(vite@5.4.19(@types/node@20.16.11)(lightningcss@1.30.1)(sass@1.89.1)) + '@vitest/pretty-format': 2.1.9 + '@vitest/runner': 2.1.9 + '@vitest/snapshot': 2.1.9 + '@vitest/spy': 2.1.9 + '@vitest/utils': 2.1.9 + chai: 5.2.0 + debug: 4.4.1(supports-color@5.5.0) + expect-type: 1.2.1 + magic-string: 0.30.17 + pathe: 1.1.2 + std-env: 3.9.0 + tinybench: 2.9.0 + tinyexec: 0.3.2 + tinypool: 1.0.2 + tinyrainbow: 1.2.0 + vite: 5.4.19(@types/node@20.16.11)(lightningcss@1.30.1)(sass@1.89.1) + vite-node: 2.1.9(@types/node@20.16.11)(lightningcss@1.30.1)(sass@1.89.1) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 20.16.11 + jsdom: 19.0.0(bufferutil@4.0.9) + transitivePeerDependencies: + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + 
vitest@3.1.4(@types/debug@4.1.12)(@types/node@22.15.21)(@vitest/browser@3.1.4)(jiti@2.4.2)(jsdom@19.0.0(bufferutil@4.0.9))(lightningcss@1.30.1)(sass@1.89.1)(tsx@4.19.4)(yaml@2.8.0): dependencies: '@vitest/expect': 3.1.4 diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 74f12989..e7c7bc14 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -3,6 +3,7 @@ packages: - packages/* - platforms/* - infrastructure/* + - tests/ onlyBuiltDependencies: - '@biomejs/biome' - cpu-features diff --git a/tests/.gitignore b/tests/.gitignore new file mode 100644 index 00000000..bc00e7d1 --- /dev/null +++ b/tests/.gitignore @@ -0,0 +1,5 @@ +node_modules/ +dist/ +*.log +.test-users-cache.json + diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 00000000..0cbdbbc2 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,139 @@ +# Staging Load Tests + +This directory contains staging load tests for the W3DS sync system. These tests run against already hosted code to simulate real-world load and verify bidirectional sync between blabsy and pictique platforms. + +## Overview + +The test suite simulates configurable concurrent users (default: 2, can be scaled up to 50+) performing various operations across both platforms, verifying that data syncs correctly between: +- **blabsy** (Firebase-based social platform) +- **pictique** (API-based social platform) + +## Test Coverage + +The suite tests sync for: +- **Users**: Profile updates (name, bio, avatar, location, website) +- **Posts/Tweets**: Content creation and updates +- **Comments/Replies**: Threaded discussions +- **Likes**: Engagement actions +- **Messages**: Direct messaging +- **Chats/Groups**: Group conversations + +## Setup + +1. Install dependencies: +```bash +cd staging-load-tests +npm install +``` + +2. Ensure environment variables are set in the root `.env` file: + - `PUBLIC_PICTIQUE_BASE_URL` - Base URL for pictique API (required) + - `PUBLIC_BLABSY_BASE_URL` - Base URL for blabsy API (required) + - `PUBLIC_REGISTRY_URL` - Base URL for registry service (required for eName provisioning) + - `PUBLIC_PROVISIONER_URL` - Base URL for evault-core provisioning service (required for eName provisioning) + - `GOOGLE_APPLICATION_CREDENTIALS` - Path to Firebase Admin credentials JSON file (relative to project root) + - `FIREBASE_PROJECT_ID` - Firebase project ID (optional) + - `JWT_SECRET` - JWT secret for pictique API (optional, defaults to 'your-secret-key') + - `DEMO_CODE_W3DS` - Demo code for W3DS verification (optional, defaults to 'd66b7138-538a-465f-a6ce-f6985854c3f4') + - `LOAD_TEST_USER_COUNT` - Number of users for concurrent load test (optional, defaults to 2) + - `CLEAR_USER_CACHE` - Set to `true` to force recreation of test users (optional, defaults to false - uses cache) + +## Running Tests + +### Run all tests +```bash +npm test +# or from root: +npm run test:staging-load +``` + +### Run in watch mode +```bash +npm run test:watch +# or from root: +npm run test:staging-load:watch +``` + +### Run specific test file +```bash +npm test -- user-sync.test.ts +npm test -- concurrent-load.test.ts +``` + +### Run with UI (interactive) +```bash +npm run test:ui +``` + +## Test Framework + +This test suite uses **Vitest** for better real-time status reporting and faster execution. 
Vitest provides: +- Real-time test status updates +- Better progress indicators for long-running tests +- Faster execution compared to Jest +- Native TypeScript support + +## Test Structure + +- `src/config/` - Configuration and environment setup +- `src/utils/` - Utility functions for Firebase, API clients, user factory, and sync verification +- `src/scenarios/` - Individual test scenarios for each entity type +- `src/load/` - Main load test orchestrator with configurable concurrent users (default: 2, set via `LOAD_TEST_USER_COUNT`) + +## User Personas + +The load test simulates different user behavior patterns: +- **Content Creator**: Creates many posts/tweets, some comments, few likes +- **Commenter**: Creates few posts, many comments, moderate likes +- **Liker**: Creates very few posts, few comments, many likes +- **Messenger**: Focuses on messaging and chat interactions +- **Balanced**: Balanced activity across all types + +## Sync Timing + +- Expected sync time: ~15 seconds +- Test buffer time: 30 seconds +- Prevention window: 15 seconds (entities updated within this window won't update again) + +## User Creation Process + +Test users are created using the following process: + +1. **eName Provisioning**: Each user's eName is provisioned via evault-core: + - Gets entropy token from `PUBLIC_REGISTRY_URL/entropy` endpoint + - Uses a random UUID as the namespace + - Provisions eName via evault-core `/provision` endpoint + - Returns the provisioned w3id (eName) in `@` format + +2. **Firebase User Creation**: + - Creates user in Firebase Auth using the provisioned eName as the UID + - Creates user document in Firestore with the eName + - Username is set to the eName without the `@` prefix + - Users automatically sync to pictique (no API calls needed) + +**Important**: Users are NEVER created through pictique-api. All users are created in Firebase first, and sync happens automatically. 
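+
+A minimal sketch of this provisioning flow is shown below. Only the endpoint paths (`/entropy`, `/provision`) and the UUID-namespace detail come from the description above; the request and response field names are assumptions, so check the registry and evault-core code for the actual contract:
+
+```typescript
+import axios from 'axios';
+import { randomUUID } from 'crypto';
+
+/**
+ * Hypothetical sketch of eName provisioning for a test user.
+ * Endpoint paths mirror the steps described above; payload and
+ * response field names are assumptions, not the real API contract.
+ */
+export async function provisionEname(
+    registryUrl: string,    // PUBLIC_REGISTRY_URL
+    provisionerUrl: string  // PUBLIC_PROVISIONER_URL (evault-core)
+): Promise<string> {
+    // 1. Get an entropy token from the registry
+    const { data: entropy } = await axios.get(`${registryUrl}/entropy`);
+
+    // 2. Provision the eName via evault-core, using a random UUID as the namespace
+    //    (a verification code such as DEMO_CODE_W3DS may also be required)
+    const { data } = await axios.post(`${provisionerUrl}/provision`, {
+        namespace: randomUUID(),
+        entropy, // assumed field name
+    });
+
+    // 3. The provisioned w3id (eName) comes back in "@..." format and is then
+    //    used as the Firebase Auth UID when the test user is created
+    return data.w3id; // assumed field name
+}
+```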
+ +## User Caching + +Test users are automatically cached to avoid recreation on each test run: +- Users are saved to `.test-users-cache.json` after creation +- Subsequent test runs will reuse cached users if available +- Cache is validated to ensure it has enough users for the requested count +- To force recreation, set `CLEAR_USER_CACHE=true` environment variable +- Cache file is gitignored and should not be committed + +## Performance Optimizations + +- **User Creation**: Users are created in parallel batches (5 at a time) for faster setup +- **Parallel Execution**: After initial user setup, all user activities run in parallel +- **Caching**: User data is cached to skip recreation on subsequent runs +- **Token Caching**: Auth tokens are obtained in parallel for all users + +## Notes + +- Tests create real users in Firebase, which will automatically sync to pictique +- Users are NOT deleted after tests (deletion is not supported for sync) +- After initial setup, all user activities run in parallel for maximum load simulation +- Each user performs multiple operations with realistic delays between actions +- eName provisioning happens before each user is created in Firebase + diff --git a/tests/package.json b/tests/package.json new file mode 100644 index 00000000..362493ca --- /dev/null +++ b/tests/package.json @@ -0,0 +1,29 @@ +{ + "name": "staging-load-tests", + "version": "1.0.0", + "private": true, + "description": "Staging load tests for W3DS sync system", + "scripts": { + "test": "vitest run", + "test:watch": "vitest", + "test:ui": "vitest --ui" + }, + "dependencies": { + "@ngneat/falso": "^7.3.0", + "axios": "^1.6.7", + "cli-table3": "^0.6.5", + "dotenv": "^16.4.5", + "eventsource": "^2.0.2", + "firebase-admin": "^13.4.0", + "jsonwebtoken": "^9.0.2", + "uuid": "^9.0.1" + }, + "devDependencies": { + "@types/eventsource": "^3.0.0", + "@types/jsonwebtoken": "^9.0.5", + "@types/node": "^20.11.24", + "@types/uuid": "^9.0.8", + "typescript": "^5.3.3", + "vitest": "^2.1.0" + } +} diff --git a/tests/src/config/env.ts b/tests/src/config/env.ts new file mode 100644 index 00000000..5702d95a --- /dev/null +++ b/tests/src/config/env.ts @@ -0,0 +1,48 @@ +import dotenv from 'dotenv'; +import path from 'path'; + +// Load environment variables from root .env file + +const envPath = path.resolve(__dirname, "../../../.env") +dotenv.config({ path: envPath }); + +export interface TestConfig { + pictiqueBaseUri: string; + blabsyBaseUri: string; + firebaseProjectId?: string; + googleApplicationCredentials?: string; + jwtSecret?: string; + syncWaitTime: number; // Expected sync time in ms + syncBufferTime: number; // Buffer time in ms + preventionWindow: number; // Prevention window in ms (15 seconds) + userCount: number; // Number of users for load tests + registryUrl?: string; // Registry URL for entropy generation + provisionerUrl?: string; // Provisioner URL (evault-core) for eName provisioning + demoCodeW3DS?: string; // Demo code for W3DS verification +} + +const requiredEnvVars = ['PUBLIC_PICTIQUE_BASE_URL', 'PUBLIC_BLABSY_BASE_URL']; + +for (const envVar of requiredEnvVars) { + if (!process.env[envVar]) { + throw new Error(`Missing required environment variable: ${envVar}, ${envPath}`); + } +} + +export const config: TestConfig = { + pictiqueBaseUri: process.env.PUBLIC_PICTIQUE_BASE_URL!, + blabsyBaseUri: process.env.PUBLIC_BLABSY_BASE_URL!, + firebaseProjectId: process.env.FIREBASE_PROJECT_ID, + googleApplicationCredentials: process.env.GOOGLE_APPLICATION_CREDENTIALS, + jwtSecret: 
process.env.JWT_SECRET || 'your-secret-key', + syncWaitTime: 15000, // 15 seconds expected sync time + syncBufferTime: 30000, // 30 seconds buffer + preventionWindow: 15000, // 15 seconds prevention window + userCount: parseInt(process.env.LOAD_TEST_USER_COUNT || '2', 10), // Default to 2 users + registryUrl: process.env.PUBLIC_REGISTRY_URL, + provisionerUrl: process.env.PUBLIC_PROVISIONER_URL, + demoCodeW3DS: process.env.DEMO_CODE_W3DS || 'd66b7138-538a-465f-a6ce-f6985854c3f4', +}; + +export default config; + diff --git a/tests/src/factories/index.ts b/tests/src/factories/index.ts new file mode 100644 index 00000000..910bfa2d --- /dev/null +++ b/tests/src/factories/index.ts @@ -0,0 +1,11 @@ +export { Platform } from './platform.enum'; +export { TestSocialUser } from './test-social-user'; +export { TestSocialUserFactory } from './test-social-user-factory'; +export type { + CreatedPost, + CreatedComment, + CreatedLike, + CreatedChat, + CreatedMessage, +} from './test-social-user'; + diff --git a/tests/src/factories/platform.enum.ts b/tests/src/factories/platform.enum.ts new file mode 100644 index 00000000..2a117d84 --- /dev/null +++ b/tests/src/factories/platform.enum.ts @@ -0,0 +1,5 @@ +export enum Platform { + BLABSY = 'blabsy', + PICTIQUE = 'pictique', +} + diff --git a/tests/src/factories/test-social-user-factory.ts b/tests/src/factories/test-social-user-factory.ts new file mode 100644 index 00000000..097c73e8 --- /dev/null +++ b/tests/src/factories/test-social-user-factory.ts @@ -0,0 +1,34 @@ +import { Platform } from './platform.enum'; +import { TestSocialUser } from './test-social-user'; + +export class TestSocialUserFactory { + /** + * Create a TestSocialUser instance for the specified platform + */ + static create(platform: Platform, ename: string): TestSocialUser { + return new TestSocialUser(platform, ename); + } + + /** + * Create TestSocialUser instances for both platforms from a single ename + */ + static createForBothPlatforms(ename: string): { + blabsy: TestSocialUser; + pictique: TestSocialUser; + } { + return { + blabsy: new TestSocialUser(Platform.BLABSY, ename), + pictique: new TestSocialUser(Platform.PICTIQUE, ename), + }; + } + + /** + * Create a TestSocialUser instance with a random platform + */ + static createRandomPlatform(ename: string): TestSocialUser { + const platforms = [Platform.BLABSY, Platform.PICTIQUE]; + const randomPlatform = platforms[Math.floor(Math.random() * platforms.length)]; + return new TestSocialUser(randomPlatform, ename); + } +} + diff --git a/tests/src/factories/test-social-user.ts b/tests/src/factories/test-social-user.ts new file mode 100644 index 00000000..0815b64d --- /dev/null +++ b/tests/src/factories/test-social-user.ts @@ -0,0 +1,328 @@ +import { Platform } from './platform.enum'; +import { getAuthToken, getApiClient, getUserChats, getChatMessages, getPostComments } from '../utils/api-client'; +import { TestUser } from '../utils/user-factory'; +import { getFirestore } from 'firebase-admin/firestore'; +import { initializeFirebase } from '../utils/user-factory'; + +// Blabsy populators +import { createPost as createBlabsyPost } from '../populators/blabsy/posts'; +import { createComment as createBlabsyComment } from '../populators/blabsy/comments'; +import { createLike as createBlabsyLike } from '../populators/blabsy/likes'; +import { createChat as createBlabsyChat } from '../populators/blabsy/chats'; +import { createMessage as createBlabsyMessage } from '../populators/blabsy/messages'; + +// Pictique populators +import { createPost as 
createPictiquePost } from '../populators/pictique/posts'; +import { createComment as createPictiqueComment } from '../populators/pictique/comments'; +import { createLike as createPictiqueLike } from '../populators/pictique/likes'; +import { createChat as createPictiqueChat } from '../populators/pictique/chats'; +import { createMessage as createPictiqueMessage } from '../populators/pictique/messages'; + +export interface CreatedPost { + id: string; + text: string | null; + authorId: string; // ename for Blabsy, user ID for Pictique +} + +export interface CreatedComment { + id: string; + text: string; + authorId: string; // ename for Blabsy, user ID for Pictique + parentId: string; // post/tweet ID +} + +export interface CreatedLike { + userId: string; // ename + postId: string; // post/tweet ID + isLiked: boolean; +} + +export interface CreatedChat { + id: string; + participants: string[]; + name?: string; +} + +export interface CreatedMessage { + id: string; + chatId: string; + senderId: string; + text: string; +} + +export class TestSocialUser { + private token: string | null = null; + private tokenPromise: Promise | null = null; + public readonly metadata: { + platform: Platform; + ename: string; + createdAt: Date; + }; + + constructor( + public readonly platform: Platform, + public readonly ename: string + ) { + this.metadata = { + platform, + ename, + createdAt: new Date(), + }; + } + + /** + * Get or fetch authentication token + */ + private async getToken(): Promise { + if (this.token) { + return this.token; + } + + if (this.tokenPromise) { + return this.tokenPromise; + } + + if (this.platform === Platform.BLABSY) { + // Blabsy doesn't need a token (uses Firestore directly) + throw new Error('Blabsy platform does not require authentication token'); + } + + // For Pictique, fetch the token + this.tokenPromise = getAuthToken(this.ename); + this.token = await this.tokenPromise; + return this.token; + } + + /** + * Create a post + */ + async createPost(text: string): Promise { + if (this.platform === Platform.BLABSY) { + const result = await createBlabsyPost(this.ename, text); + return { + id: result.id, + text: result.text, + authorId: result.createdBy, // ename for Blabsy + }; + } else { + const token = await this.getToken(); + const result = await createPictiquePost(token, text); + return { + id: result.id, + text: result.text, + authorId: result.authorId, // user ID for Pictique + }; + } + } + + /** + * Create a comment/reply + */ + async createComment(parentId: string, text: string): Promise { + if (this.platform === Platform.BLABSY) { + const result = await createBlabsyComment(this.ename, parentId, text); + return { + id: result.id, + text: result.text, + authorId: result.createdBy, // ename for Blabsy + parentId: result.parentId, + }; + } else { + const token = await this.getToken(); + const result = await createPictiqueComment(token, parentId, text); + return { + id: result.id, + text: result.text, + authorId: result.authorId, // user ID for Pictique + parentId: result.postId, + }; + } + } + + /** + * Create a like + */ + async createLike(postId: string): Promise { + if (this.platform === Platform.BLABSY) { + const result = await createBlabsyLike(this.ename, postId); + return { + userId: result.userId, + postId: result.tweetId, // tweetId -> postId + isLiked: result.isLiked, + }; + } else { + const token = await this.getToken(); + const result = await createPictiqueLike(token, postId, this.ename); + return { + userId: result.userId, + postId: result.postId, + isLiked: 
result.isLiked, + }; + } + } + + /** + * Create a chat + * participantEnames: array of enames (e.g., ["@user1", "@user2"]) + */ + async createChat(participantEnames: string[], name?: string): Promise { + if (this.platform === Platform.BLABSY) { + return await createBlabsyChat(participantEnames, name); + } else { + const token = await this.getToken(); + const createdChat = await createPictiqueChat(token, participantEnames); + + // Re-fetch the chat to get full participant data with handles/enames + const { getChat } = await import('../utils/api-client'); + const fullChat = await getChat(createdChat.id, token); + + return { + id: fullChat.id, + participants: fullChat.participants || createdChat.participants, + name: fullChat.name || createdChat.name, + }; + } + } + + /** + * Create a message + */ + async createMessage(chatId: string, text: string): Promise { + if (this.platform === Platform.BLABSY) { + return await createBlabsyMessage(chatId, this.ename, text); + } else { + const token = await this.getToken(); + return await createPictiqueMessage(token, chatId, text, this.ename); + } + } + + /** + * Get all posts/tweets + */ + async getAllPosts(): Promise { + if (this.platform === Platform.BLABSY) { + initializeFirebase(); + const db = getFirestore(); + const tweetsSnapshot = await db.collection('tweets').get(); + // Filter out replies (only return posts without parent) + return tweetsSnapshot.docs + .map(doc => ({ id: doc.id, ...doc.data() })) + .filter((tweet: any) => !tweet.parent); + } else { + const token = await this.getToken(); + const client = getApiClient(); + const response = await client.get('/api/posts/feed', { + headers: { + Authorization: `Bearer ${token}`, + }, + params: { + limit: 1000, + }, + }); + + // Handle different response structures + if (Array.isArray(response.data)) { + return response.data; + } else if (response.data && Array.isArray(response.data.posts)) { + return response.data.posts; + } else if (response.data && Array.isArray(response.data.data)) { + return response.data.data; + } + return []; + } + } + + /** + * Get all chats + */ + async getAllChats(): Promise { + if (this.platform === Platform.BLABSY) { + initializeFirebase(); + const db = getFirestore(); + const chatsSnapshot = await db.collection('chats').get(); + return chatsSnapshot.docs.map(doc => ({ + id: doc.id, + ...doc.data(), + })); + } else { + const token = await this.getToken(); + const chatsResponse: any = await getUserChats(token); + + // Handle different response structures + if (Array.isArray(chatsResponse)) { + return chatsResponse; + } else if (chatsResponse && Array.isArray(chatsResponse.chats)) { + return chatsResponse.chats; + } else if (chatsResponse && Array.isArray(chatsResponse.data)) { + return chatsResponse.data; + } + return []; + } + } + + /** + * Get all messages for a specific chat + */ + async getAllMessages(chatId: string): Promise { + if (this.platform === Platform.BLABSY) { + initializeFirebase(); + const db = getFirestore(); + const messagesSnapshot = await db + .collection('chats') + .doc(chatId) + .collection('messages') + .orderBy('createdAt', 'desc') + .get(); + return messagesSnapshot.docs.map(doc => ({ + id: doc.id, + ...doc.data(), + })); + } else { + const token = await this.getToken(); + const messagesResponse: any = await getChatMessages(chatId, token); + + // Handle different response structures + if (Array.isArray(messagesResponse)) { + return messagesResponse; + } else if (messagesResponse && Array.isArray(messagesResponse.messages)) { + return 
messagesResponse.messages; + } else if (messagesResponse && Array.isArray(messagesResponse.data)) { + return messagesResponse.data; + } + return []; + } + } + + /** + * Get all comments for a specific post + */ + async getAllComments(postId: string): Promise { + if (this.platform === Platform.BLABSY) { + initializeFirebase(); + const db = getFirestore(); + // Comments in Blabsy are replies (tweets with parent) + const repliesSnapshot = await db + .collection('tweets') + .where('parent.id', '==', postId) + .get(); + return repliesSnapshot.docs.map(doc => ({ + id: doc.id, + ...doc.data(), + })); + } else { + const token = await this.getToken(); + const commentsResponse: any = await getPostComments(postId, token); + + // Handle different response structures + if (Array.isArray(commentsResponse)) { + return commentsResponse; + } else if (commentsResponse && Array.isArray(commentsResponse.comments)) { + return commentsResponse.comments; + } else if (commentsResponse && Array.isArray(commentsResponse.data)) { + return commentsResponse.data; + } + return []; + } + } +} + diff --git a/tests/src/populators/blabsy/chats.ts b/tests/src/populators/blabsy/chats.ts new file mode 100644 index 00000000..eea6cbc0 --- /dev/null +++ b/tests/src/populators/blabsy/chats.ts @@ -0,0 +1,24 @@ +import { createChat as createChatInFirestore } from '../../utils/firebase-client'; + +export interface CreatedChat { + id: string; + participants: string[]; + name?: string; +} + +/** + * Create a chat on Blabsy + */ +export async function createChat( + participants: string[], + name?: string +): Promise { + const chatId = await createChatInFirestore(participants, name); + + return { + id: chatId, + participants, + name, + }; +} + diff --git a/tests/src/populators/blabsy/comments.ts b/tests/src/populators/blabsy/comments.ts new file mode 100644 index 00000000..f2e0dffb --- /dev/null +++ b/tests/src/populators/blabsy/comments.ts @@ -0,0 +1,27 @@ +import { createReply } from '../../utils/firebase-client'; + +export interface CreatedComment { + id: string; + text: string; + createdBy: string; + parentId: string; +} + +/** + * Create a comment (reply) on Blabsy + */ +export async function createComment( + userId: string, + parentTweetId: string, + text: string +): Promise { + const replyId = await createReply(userId, parentTweetId, text); + + return { + id: replyId, + text, + createdBy: userId, + parentId: parentTweetId, + }; +} + diff --git a/tests/src/populators/blabsy/likes.ts b/tests/src/populators/blabsy/likes.ts new file mode 100644 index 00000000..dbf67618 --- /dev/null +++ b/tests/src/populators/blabsy/likes.ts @@ -0,0 +1,21 @@ +import { toggleLike } from '../../utils/firebase-client'; + +export interface CreatedLike { + userId: string; + tweetId: string; + isLiked: boolean; +} + +/** + * Create a like on Blabsy + */ +export async function createLike(userId: string, tweetId: string): Promise { + await toggleLike(userId, tweetId, true); + + return { + userId, + tweetId, + isLiked: true, + }; +} + diff --git a/tests/src/populators/blabsy/messages.ts b/tests/src/populators/blabsy/messages.ts new file mode 100644 index 00000000..ce1de293 --- /dev/null +++ b/tests/src/populators/blabsy/messages.ts @@ -0,0 +1,27 @@ +import { sendMessage } from '../../utils/firebase-client'; + +export interface CreatedMessage { + id: string; + chatId: string; + senderId: string; + text: string; +} + +/** + * Create a message on Blabsy + */ +export async function createMessage( + chatId: string, + senderId: string, + text: string +): Promise { 
+ const messageId = await sendMessage(chatId, senderId, text); + + return { + id: messageId, + chatId, + senderId, + text, + }; +} + diff --git a/tests/src/populators/blabsy/posts.ts b/tests/src/populators/blabsy/posts.ts new file mode 100644 index 00000000..5c6a211d --- /dev/null +++ b/tests/src/populators/blabsy/posts.ts @@ -0,0 +1,21 @@ +import { createTweet } from '../../utils/firebase-client'; + +export interface CreatedPost { + id: string; + text: string | null; + createdBy: string; +} + +/** + * Create a post (tweet) on Blabsy + */ +export async function createPost(userId: string, text: string): Promise { + const tweetId = await createTweet(userId, text); + + return { + id: tweetId, + text, + createdBy: userId, + }; +} + diff --git a/tests/src/populators/pictique/chats.ts b/tests/src/populators/pictique/chats.ts new file mode 100644 index 00000000..c43229e5 --- /dev/null +++ b/tests/src/populators/pictique/chats.ts @@ -0,0 +1,36 @@ +import { createChat as createChatApi, searchUsers } from '../../utils/api-client'; + +export interface CreatedChat { + id: string; + participants: string[]; + name?: string; +} + +/** + * Create a chat on Pictique + * participantEnames: array of enames (e.g., ["@user1", "@user2"]) + */ +export async function createChat( + token: string, + participantEnames: string[] +): Promise { + // Look up user IDs from enames + const participantIds: string[] = []; + for (const ename of participantEnames) { + const users = await searchUsers(ename); + const user = users.find((u: any) => u.ename === ename); + if (!user || !user.id) { + throw new Error(`User not found in Pictique: ${ename}`); + } + participantIds.push(user.id); + } + + const chat = await createChatApi(participantIds, undefined, token); + + return { + id: chat.id, + participants: chat.participants?.map((p: any) => p.id || p) || participantIds, + name: chat.name, + }; +} + diff --git a/tests/src/populators/pictique/comments.ts b/tests/src/populators/pictique/comments.ts new file mode 100644 index 00000000..9ff992e1 --- /dev/null +++ b/tests/src/populators/pictique/comments.ts @@ -0,0 +1,27 @@ +import { createComment as createCommentApi } from '../../utils/api-client'; + +export interface CreatedComment { + id: string; + text: string; + authorId: string; + postId: string; +} + +/** + * Create a comment on Pictique + */ +export async function createComment( + token: string, + postId: string, + text: string +): Promise { + const comment = await createCommentApi(postId, text, token); + + return { + id: comment.id, + text: comment.text, + authorId: comment.author?.id || comment.author?.ename || '', + postId: comment.postId || postId, + }; +} + diff --git a/tests/src/populators/pictique/likes.ts b/tests/src/populators/pictique/likes.ts new file mode 100644 index 00000000..0b236e41 --- /dev/null +++ b/tests/src/populators/pictique/likes.ts @@ -0,0 +1,21 @@ +import { toggleLike as toggleLikeApi } from '../../utils/api-client'; + +export interface CreatedLike { + userId: string; + postId: string; + isLiked: boolean; +} + +/** + * Create a like on Pictique + */ +export async function createLike(token: string, postId: string, userId: string): Promise { + await toggleLikeApi(postId, token); + + return { + userId, + postId, + isLiked: true, + }; +} + diff --git a/tests/src/populators/pictique/messages.ts b/tests/src/populators/pictique/messages.ts new file mode 100644 index 00000000..c87f3d23 --- /dev/null +++ b/tests/src/populators/pictique/messages.ts @@ -0,0 +1,28 @@ +import { sendMessage as sendMessageApi } from 
'../../utils/api-client'; + +export interface CreatedMessage { + id: string; + chatId: string; + senderId: string; + text: string; +} + +/** + * Create a message on Pictique + */ +export async function createMessage( + token: string, + chatId: string, + text: string, + senderId: string +): Promise { + const message = await sendMessageApi(chatId, text, token); + + return { + id: message.id, + chatId, + senderId, + text: message.text, + }; +} + diff --git a/tests/src/populators/pictique/posts.ts b/tests/src/populators/pictique/posts.ts new file mode 100644 index 00000000..30c421e6 --- /dev/null +++ b/tests/src/populators/pictique/posts.ts @@ -0,0 +1,21 @@ +import { createPost as createPostApi } from '../../utils/api-client'; + +export interface CreatedPost { + id: string; + text: string; + authorId: string; +} + +/** + * Create a post on Pictique + */ +export async function createPost(token: string, text: string): Promise { + const post = await createPostApi({ text }, token); + + return { + id: post.id, + text: post.text, + authorId: post.author?.id || post.author?.ename || '', + }; +} + diff --git a/tests/src/sync-verification.test.ts b/tests/src/sync-verification.test.ts new file mode 100644 index 00000000..24b2cd9b --- /dev/null +++ b/tests/src/sync-verification.test.ts @@ -0,0 +1,867 @@ +import { createTestUsers, TestUser } from './utils/user-factory'; +import { getAuthToken } from './utils/api-client'; +import { config } from './config/env'; +import * as falso from '@ngneat/falso'; +import Table from 'cli-table3'; + +import { createPost as createBlabsyPost } from './populators/blabsy/posts'; +import { createComment as createBlabsyComment } from './populators/blabsy/comments'; +import { createLike as createBlabsyLike } from './populators/blabsy/likes'; +import { createChat as createBlabsyChat } from './populators/blabsy/chats'; +import { createMessage as createBlabsyMessage } from './populators/blabsy/messages'; + +import { createPost as createPictiquePost } from './populators/pictique/posts'; +import { createComment as createPictiqueComment } from './populators/pictique/comments'; +import { createLike as createPictiqueLike } from './populators/pictique/likes'; +import { createChat as createPictiqueChat } from './populators/pictique/chats'; +import { createMessage as createPictiqueMessage } from './populators/pictique/messages'; + +// Data comparison +import { compareAllData } from './utils/data-comparator'; +import { Platform, TestSocialUser, TestSocialUserFactory } from './factories'; + + +const TEST_CONFIG = { + POSTS_PER_USER: 2, + COMMENTS_PER_USER: 4, +} + +describe('Sync Verification Test', () => { + const USER_COUNT = config.userCount; + let loadedUsers: TestUser[] = []; + let userTokens: Map = new Map(); + let testSocialUsers: TestSocialUser[] = []; + + // Track expected sync counts and sync times for summary report + const syncSummary = { + posts: { blabsy: 0, pictique: 0 }, + chats: { blabsy: 0, pictique: 0 }, + comments: { blabsy: 0, pictique: 0 }, + messages: { blabsy: 0, pictique: 0 }, + }; + + // Track actual sync counts + const actualSyncCounts = { + posts: { blabsyToPictique: 0, pictiqueToBlabsy: 0 }, + chats: { blabsyToPictique: 0, pictiqueToBlabsy: 0 }, + comments: { blabsyToPictique: 0, pictiqueToBlabsy: 0 }, + messages: { blabsyToPictique: 0, pictiqueToBlabsy: 0 }, + }; + + + beforeAll(async () => { + // Clear cache if requested + if (process.env.CLEAR_USER_CACHE === 'true') { + const { clearUserCache } = await import('./utils/user-cache'); + clearUserCache(); + } + 
+ // Check if cache exists and is valid before creating users + const { isCacheValid } = await import('./utils/user-cache'); + const cacheWasValid = isCacheValid(USER_COUNT); + let usersWereCreated = false; + + // Create or load users from cache + loadedUsers = await createTestUsers(USER_COUNT); + + // If cache wasn't valid, users were just created + if (!cacheWasValid) { + usersWereCreated = true; + } + + // Ensure we have enough users (create more if cache had fewer) + if (loadedUsers.length < USER_COUNT) { + const additionalUsers = await createTestUsers(USER_COUNT - loadedUsers.length, false); + loadedUsers.push(...additionalUsers); + usersWereCreated = true; + } + + // If users were created (not from cache), wait a bit for them to sync + if (usersWereCreated) { + console.log('Users were created (not from cache), waiting 10 seconds for sync...'); + await new Promise(resolve => setTimeout(resolve, 10_000)); + } + + for (const user of loadedUsers) { + const socialUser = TestSocialUserFactory.createRandomPlatform(user.ename); + testSocialUsers.push(socialUser) + } + + }, 300000); // 5 minute timeout + + describe('Posts and Chats Sync', () => { + let pictiquePosts: any[] = []; + let blabsyPosts: any[] = []; + let pictiqueChats: any[] = []; + let blabsyChats: any[] = []; + let loadedPostsFromPictique: any[] = []; + let loadedPostsFromBlabsy: any[] = []; + let loadedChatsFromPictique: any[] = []; + let loadedChatsFromBlabsy: any[] = []; + + beforeAll(async () => { + // Batch create posts and chats in parallel (they're independent) + const createPromises: Promise[] = []; + + // Create posts from all users + for (const user of testSocialUsers) { + createPromises.push( + user.createPost(falso.randSentence()).then(post => { + if (user.metadata.platform === Platform.BLABSY) { + blabsyPosts.push(post); + syncSummary.posts.blabsy++; + } else if (user.metadata.platform === Platform.PICTIQUE) { + pictiquePosts.push(post); + syncSummary.posts.pictique++; + } + }) + ); + } + + // Create DMs between n/2 users (all permutations of pairs) + // Only use first half of users for DM creation + const dmUserCount = Math.floor(loadedUsers.length / 2); + const dmUsers = loadedUsers.slice(0, dmUserCount); + + if (dmUsers.length >= 2) { + // Create all permutations of pairs for DMs + for (let i = 0; i < dmUsers.length; i++) { + for (let j = i + 1; j < dmUsers.length; j++) { + const user1 = dmUsers[i]; + const user2 = dmUsers[j]; + const user1Blabsy = TestSocialUserFactory.create(Platform.BLABSY, user1.ename); + const user1Pictique = TestSocialUserFactory.create(Platform.PICTIQUE, user1.ename); + + createPromises.push( + user1Blabsy.createChat([user1.ename, user2.ename]).then(chat => { + blabsyChats.push(chat); + syncSummary.chats.blabsy++; + }) + ); + + createPromises.push( + user1Pictique.createChat([user1.ename, user2.ename]).then(chat => { + pictiqueChats.push(chat); + syncSummary.chats.pictique++; + }) + ); + } + } + } + + await Promise.all(createPromises); + + // Wait 20 seconds for sync + await new Promise(resolve => setTimeout(resolve, 20_000)); + + // Fetch all data from both platforms + const [user] = loadedUsers.map(u => TestSocialUserFactory.createForBothPlatforms(u.ename)); + loadedPostsFromPictique = await user.pictique.getAllPosts(); + loadedPostsFromBlabsy = await user.blabsy.getAllPosts(); + + // For Pictique, fetch chats from ALL users since getUserChats only returns chats for the logged-in user + const pictiqueChatsSet = new Map(); + for (const testUser of loadedUsers) { + const pictiqueUser = 
TestSocialUserFactory.create(Platform.PICTIQUE, testUser.ename); + const userChats = await pictiqueUser.getAllChats(); + for (const chat of userChats) { + if (!pictiqueChatsSet.has(chat.id)) { + pictiqueChatsSet.set(chat.id, chat); + } + } + } + loadedChatsFromPictique = Array.from(pictiqueChatsSet.values()); + + // For Blabsy, we can fetch all chats directly from Firestore + loadedChatsFromBlabsy = await user.blabsy.getAllChats(); + }, 300_000); + + test('[Posts] Blabsy -> Pictique', () => { + const failedSyncs: any[] = []; + let pictiquePostSyncCounter = 0; + for (const post of blabsyPosts) { + const match = loadedPostsFromPictique.find((p: any) => + p.text === post.text + ); + if (match) { + pictiquePostSyncCounter++; + } else { + failedSyncs.push({ type: 'post', id: post.id, text: post.text, platform: 'Blabsy' }); + } + } + actualSyncCounts.posts.blabsyToPictique = pictiquePostSyncCounter; + if (failedSyncs.length > 0) { + console.log('\n❌ Failed to sync Blabsy -> Pictique:', JSON.stringify(failedSyncs, null, 2)); + } + expect(blabsyPosts.length).toEqual(pictiquePostSyncCounter); + }); + + test('[Posts] Pictique -> Blabsy', () => { + const failedSyncs: any[] = []; + let blabsyPostSyncCounter = 0; + for (const post of pictiquePosts) { + const match = loadedPostsFromBlabsy.find((p: any) => + p.text === post.text + ); + if (match) { + blabsyPostSyncCounter++; + } else { + failedSyncs.push({ type: 'post', id: post.id, text: post.text, platform: 'Pictique' }); + } + } + actualSyncCounts.posts.pictiqueToBlabsy = blabsyPostSyncCounter; + if (failedSyncs.length > 0) { + console.log('\n❌ Failed to sync Pictique -> Blabsy:', JSON.stringify(failedSyncs, null, 2)); + } + expect(pictiquePosts.length).toEqual(blabsyPostSyncCounter); + }); + + test('[Chats] Blabsy -> Pictique', () => { + if (blabsyChats.length === 0) { + return; + } + + // Helper function to compare participant arrays properly + const arraysEqual = (a: string[], b: string[]): boolean => { + if (a.length !== b.length) return false; + const setA = new Set(a); + const setB = new Set(b); + if (setA.size !== setB.size) return false; + for (const item of setA) { + if (!setB.has(item)) return false; + } + return true; + }; + + console.log(`\n🔍 DEBUG: Looking for ${blabsyChats.length} Blabsy chats in ${loadedChatsFromPictique.length} Pictique chats`); + console.log(`📋 Created Blabsy chats (from test creation):`); + for (const c of blabsyChats) { + const participants = (c.participants || []).map((p: string) => { + const normalized = p.startsWith('@') ? p.slice(1) : p; + return `@${normalized}`; + }).sort(); + console.log(` - Chat ${c.id}:`, JSON.stringify(participants)); + } + console.log(`📋 Fetched Blabsy chats (from Firestore):`); + for (const c of loadedChatsFromBlabsy) { + const participants = (c.participants || []).map((p: string) => { + const normalized = p.startsWith('@') ? 
p.slice(1) : p; + return `@${normalized}`; + }).sort(); + console.log(` - Chat ${c.id}:`, JSON.stringify(participants)); + } + + // Verify all created chats exist in Firestore + const createdChatIds = new Set(blabsyChats.map(c => c.id)); + const fetchedChatIds = new Set(loadedChatsFromBlabsy.map((c: any) => c.id)); + const missingChats = blabsyChats.filter(c => !fetchedChatIds.has(c.id)); + if (missingChats.length > 0) { + console.error(`\n⚠️ WARNING: ${missingChats.length} created Blabsy chats not found in Firestore:`, missingChats.map(c => c.id)); + } + console.log(`📋 Available Pictique chats:`); + for (const c of loadedChatsFromPictique) { + const participants = (c.participants || []).map((p: any) => ({ + id: p.id, + handle: p.handle, + ename: p.ename, + normalized: p.handle ? `@${p.handle}` : (p.ename || `@${p.id}`) + })); + console.log(` - Chat ${c.id}:`, JSON.stringify(participants, null, 2)); + } + + const failedSyncs: any[] = []; + let pictiqueChatSyncCounter = 0; + const usedPictiqueChatIds = new Set(); // Track matched chats to avoid duplicates + + for (const chat of blabsyChats) { + // Blabsy participants are enames (with or without @) + const blabsyParticipants = (chat.participants || []).map((p: string) => { + // Normalize: remove @ if present, then add it back for consistency + const normalized = p.startsWith('@') ? p.slice(1) : p; + return `@${normalized}`; + }).sort(); + + console.log(`\n🔍 Looking for Blabsy chat ${chat.id} with participants:`, blabsyParticipants); + + const match = loadedChatsFromPictique.find((c: any) => { + // Skip if already matched + if (usedPictiqueChatIds.has(c.id)) { + console.log(` ⏭️ Skipping Pictique chat ${c.id} (already matched)`); + return false; + } + + // Pictique participants - use handle (without @) + const pictiqueParticipants = (c.participants || []).map((p: any) => { + // Handle never has @, so always add it + if (!p.handle) { + console.error(' ❌ Pictique participant missing handle:', p); + } + return `@${p.handle}`; + }).sort(); + + console.log(` 🔎 Comparing with Pictique chat ${c.id} participants:`, pictiqueParticipants); + const isMatch = arraysEqual(blabsyParticipants, pictiqueParticipants); + console.log(` ${isMatch ? 
'✅ MATCH' : '❌ NO MATCH'}`); + return isMatch; + }); + + if (match) { + pictiqueChatSyncCounter++; + usedPictiqueChatIds.add(match.id); + console.log(` ✅ Matched Blabsy chat ${chat.id} with Pictique chat ${match.id}`); + } else { + console.log(` ❌ NO MATCH FOUND for Blabsy chat ${chat.id}`); + failedSyncs.push({ type: 'chat', id: chat.id, participants: blabsyParticipants, platform: 'Blabsy' }); + } + } + + actualSyncCounts.chats.blabsyToPictique = pictiqueChatSyncCounter; + if (failedSyncs.length > 0) { + console.log('\n❌ Failed to sync Blabsy -> Pictique:', JSON.stringify(failedSyncs, null, 2)); + } + expect(blabsyChats.length).toEqual(pictiqueChatSyncCounter); + }); + + test('[Chats] Pictique -> Blabsy', () => { + if (pictiqueChats.length === 0) { + return; + } + + // Helper function to compare participant arrays properly + const arraysEqual = (a: string[], b: string[]): boolean => { + if (a.length !== b.length) return false; + const setA = new Set(a); + const setB = new Set(b); + if (setA.size !== setB.size) return false; + for (const item of setA) { + if (!setB.has(item)) return false; + } + return true; + }; + + const failedSyncs: any[] = []; + let blabsyChatSyncCounter = 0; + const usedBlabsyChatIds = new Set(); // Track matched chats to avoid duplicates + + for (const chat of pictiqueChats) { + // Pictique participants - use handle (without @) + const pictiqueParticipants = (chat.participants || []).map((p: any) => { + // Handle never has @, so always add it + if (!p.handle) { + console.error('Pictique participant missing handle:', p); + } + return `@${p.handle}`; + }).sort(); + + const match = loadedChatsFromBlabsy.find((c: any) => { + // Skip if already matched + if (usedBlabsyChatIds.has(c.id)) return false; + + // Blabsy participants are enames (with or without @) + const blabsyParticipants = (c.participants || []).map((p: string) => { + // Normalize: remove @ if present, then add it back for consistency + const normalized = p.startsWith('@') ? 
p.slice(1) : p; + return `@${normalized}`; + }).sort(); + + return arraysEqual(pictiqueParticipants, blabsyParticipants); + }); + + if (match) { + blabsyChatSyncCounter++; + usedBlabsyChatIds.add(match.id); + } else { + failedSyncs.push({ type: 'chat', id: chat.id, participants: pictiqueParticipants, platform: 'Pictique' }); + } + } + + actualSyncCounts.chats.pictiqueToBlabsy = blabsyChatSyncCounter; + if (failedSyncs.length > 0) { + console.log('\n❌ Failed to sync Pictique -> Blabsy:', JSON.stringify(failedSyncs, null, 2)); + } + expect(pictiqueChats.length).toEqual(blabsyChatSyncCounter); + }); + }, 300_000); + + describe('Comments and Messages Sync', () => { + let pictiqueComments: any[] = []; + let blabsyComments: any[] = []; + let pictiqueMessages: any[] = []; + let blabsyMessages: any[] = []; + let blabsyPostIdForComments: string | null = null; + let pictiquePostIdForComments: string | null = null; + let syncedPictiquePostId: string | null = null; + let syncedBlabsyPostId: string | null = null; + + beforeAll(async () => { + // Get posts and chats from both platforms (from previous test) + const [user] = loadedUsers.map(u => TestSocialUserFactory.createForBothPlatforms(u.ename)); + const allPictiquePosts = await user.pictique.getAllPosts(); + const allBlabsyPosts = await user.blabsy.getAllPosts(); + const allPictiqueChats = await user.pictique.getAllChats(); + const allBlabsyChats = await user.blabsy.getAllChats(); + + // Find posts for comments + if (allBlabsyPosts.length > 0) { + const blabsyPost = allBlabsyPosts[0]; + blabsyPostIdForComments = blabsyPost.id; + const syncedPost = allPictiquePosts.find((p: any) => p.text === blabsyPost.text); + if (syncedPost) { + syncedPictiquePostId = syncedPost.id; + } + } + + if (allPictiquePosts.length > 0) { + const pictiquePost = allPictiquePosts[0]; + pictiquePostIdForComments = pictiquePost.id; + const syncedPost = allBlabsyPosts.find((p: any) => p.text === pictiquePost.text); + if (syncedPost) { + syncedBlabsyPostId = syncedPost.id; + } + } + + + // Batch create comments and messages in parallel + const createPromises: Promise[] = []; + + // Create comments from Blabsy users on Blabsy posts + if (blabsyPostIdForComments) { + for (const user of testSocialUsers) { + if (user.metadata.platform === Platform.BLABSY) { + createPromises.push( + user.createComment(blabsyPostIdForComments, falso.randSentence()).then(comment => { + blabsyComments.push(comment); + syncSummary.comments.blabsy++; + }) + ); + } + } + } + + // Create comments from Pictique users on Pictique posts + if (pictiquePostIdForComments) { + for (const user of testSocialUsers) { + if (user.metadata.platform === Platform.PICTIQUE) { + createPromises.push( + user.createComment(pictiquePostIdForComments, falso.randSentence()).then(comment => { + pictiqueComments.push(comment); + syncSummary.comments.pictique++; + }) + ); + } + } + } + + // Create messages from users who are actually participants in ALL chats + // Each user sends 1 message per chat + + // Use all Blabsy chats + for (const blabsyChat of allBlabsyChats) { + const chatParticipantEnames = (blabsyChat.participants || []).map((p: string) => + p.startsWith('@') ? p : `@${p}` + ); + + // Only create messages from users who are participants in the chat + for (const user of loadedUsers) { + const userEname = user.ename.startsWith('@') ? 
user.ename : `@${user.ename}`; + if (chatParticipantEnames.includes(userEname)) { + const blabsyUser = TestSocialUserFactory.create(Platform.BLABSY, user.ename); + createPromises.push( + blabsyUser.createMessage(blabsyChat.id, falso.randSentence()).then(message => { + blabsyMessages.push(message); + syncSummary.messages.blabsy++; + }) + ); + } + } + } + + // Use all Pictique chats + for (const pictiqueChat of allPictiqueChats) { + const chatParticipantEnames = (pictiqueChat.participants || []).map((p: any) => { + const ename = p.ename || p.id || p; + return ename.startsWith('@') ? ename : `@${ename}`; + }); + + // Only create messages from users who are participants in the chat + for (const user of loadedUsers) { + const userEname = user.ename.startsWith('@') ? user.ename : `@${user.ename}`; + if (chatParticipantEnames.includes(userEname)) { + const pictiqueUser = TestSocialUserFactory.create(Platform.PICTIQUE, user.ename); + createPromises.push( + pictiqueUser.createMessage(pictiqueChat.id, falso.randSentence()).then(message => { + pictiqueMessages.push(message); + syncSummary.messages.pictique++; + }) + ); + } + } + } + + await Promise.all(createPromises); + + // Wait 20 seconds for sync + await new Promise(resolve => setTimeout(resolve, 20_000)); + + }, 300_000); + + test('[Comments] Blabsy -> Pictique', async () => { + if (!blabsyPostIdForComments || !syncedPictiquePostId || blabsyComments.length === 0) { + return; + } + + const [user] = loadedUsers.map(u => TestSocialUserFactory.createForBothPlatforms(u.ename)); + const loadedCommentsFromPictique = await user.pictique.getAllComments(syncedPictiquePostId); + + const failedSyncs: any[] = []; + let pictiqueCommentSyncCounter = 0; + for (const comment of blabsyComments) { + const match = loadedCommentsFromPictique.find((c: any) => + c.text === comment.text + ); + if (match) { + pictiqueCommentSyncCounter++; + } else { + failedSyncs.push({ type: 'comment', id: comment.id, text: comment.text, platform: 'Blabsy' }); + } + } + if (failedSyncs.length > 0) { + console.log('\n❌ Failed to sync Blabsy -> Pictique:', JSON.stringify(failedSyncs, null, 2)); + } + expect(blabsyComments.length).toEqual(pictiqueCommentSyncCounter); + }); + + test('[Comments] Pictique -> Blabsy', async () => { + if (!pictiquePostIdForComments || !syncedBlabsyPostId || pictiqueComments.length === 0) { + return; + } + + const [user] = loadedUsers.map(u => TestSocialUserFactory.createForBothPlatforms(u.ename)); + const loadedCommentsFromBlabsy = await user.blabsy.getAllComments(syncedBlabsyPostId); + + const failedSyncs: any[] = []; + let blabsyCommentSyncCounter = 0; + for (const comment of pictiqueComments) { + const match = loadedCommentsFromBlabsy.find((c: any) => + c.text === comment.text + ); + if (match) { + blabsyCommentSyncCounter++; + } else { + failedSyncs.push({ type: 'comment', id: comment.id, text: comment.text, platform: 'Pictique' }); + } + } + if (failedSyncs.length > 0) { + console.log('\n❌ Failed to sync Pictique -> Blabsy:', JSON.stringify(failedSyncs, null, 2)); + } + expect(pictiqueComments.length).toEqual(blabsyCommentSyncCounter); + }); + + test('[Messages] Blabsy -> Pictique', async () => { + if (blabsyMessages.length === 0) { + return; + } + + console.log(`\n🔍 DEBUG: Checking ${blabsyMessages.length} Blabsy messages for sync`); + console.log(`📋 Blabsy messages:`, blabsyMessages.map(m => ({ + id: m.id, + chatId: m.chatId, + text: m.text?.substring(0, 50) + '...', + senderId: m.senderId + }))); + + const [user] = loadedUsers.map(u => 
TestSocialUserFactory.createForBothPlatforms(u.ename)); + const allBlabsyChats = await user.blabsy.getAllChats(); + + // For Pictique, fetch chats from ALL users since getUserChats only returns chats for the logged-in user + const pictiqueChatsSet = new Map(); + for (const testUser of loadedUsers) { + const pictiqueUser = TestSocialUserFactory.create(Platform.PICTIQUE, testUser.ename); + const userChats = await pictiqueUser.getAllChats(); + for (const chat of userChats) { + if (!pictiqueChatsSet.has(chat.id)) { + pictiqueChatsSet.set(chat.id, chat); + } + } + } + const allPictiqueChats = Array.from(pictiqueChatsSet.values()); + console.log(`📋 Found ${allPictiqueChats.length} Pictique chats from all users`); + + // Build a map of Blabsy chat ID -> Pictique chat ID + const chatIdMap = new Map(); + for (const blabsyChat of allBlabsyChats) { + const blabsyParticipants = (blabsyChat.participants || []).map((p: string) => + p.startsWith('@') ? p : `@${p}` + ).sort(); + + const syncedChat = allPictiqueChats.find((c: any) => { + const pictiqueParticipants = (c.participants || []).map((p: any) => { + const handle = p.handle || p.id; + return handle ? `@${handle}` : (p.ename ? (p.ename.startsWith('@') ? p.ename : `@${p.ename}`) : `@${p.id}`); + }).sort(); + return JSON.stringify(blabsyParticipants) === JSON.stringify(pictiqueParticipants); + }); + + if (syncedChat) { + chatIdMap.set(blabsyChat.id, syncedChat.id); + console.log(`✅ Mapped Blabsy chat ${blabsyChat.id} -> Pictique chat ${syncedChat.id}`); + } else { + console.log(`❌ No Pictique chat found for Blabsy chat ${blabsyChat.id} with participants:`, blabsyParticipants); + } + } + + const failedSyncs: any[] = []; + let pictiqueMessageSyncCounter = 0; + + // Group messages by chat ID + const messagesByChat = new Map(); + for (const message of blabsyMessages) { + const chatId = message.chatId; + if (!messagesByChat.has(chatId)) { + messagesByChat.set(chatId, []); + } + messagesByChat.get(chatId)!.push(message); + } + + console.log(`\n📋 Messages grouped by chat:`, Array.from(messagesByChat.entries()).map(([chatId, msgs]) => ({ + blabsyChatId: chatId, + messageCount: msgs.length, + messages: msgs.map(m => ({ id: m.id, text: m.text?.substring(0, 30) + '...' 
})) + }))); + + // Check each chat's messages + for (const [blabsyChatId, messages] of messagesByChat.entries()) { + console.log(`\n🔍 Checking messages for Blabsy chat ${blabsyChatId} (${messages.length} messages)`); + const pictiqueChatId = chatIdMap.get(blabsyChatId); + if (!pictiqueChatId) { + console.log(` ❌ Chat ${blabsyChatId} didn't sync to Pictique, marking all ${messages.length} messages as failed`); + // Chat didn't sync, all messages failed + messages.forEach(msg => { + failedSyncs.push({ type: 'message', id: msg.id, text: msg.text, platform: 'Blabsy', chatId: blabsyChatId }); + }); + continue; + } + + console.log(` ✅ Chat synced: Blabsy ${blabsyChatId} -> Pictique ${pictiqueChatId}`); + + // Find a user who is a participant in this Pictique chat to fetch messages + const pictiqueChat = allPictiqueChats.find(c => c.id === pictiqueChatId); + let participantUser = user.pictique; + if (pictiqueChat && pictiqueChat.participants && pictiqueChat.participants.length > 0) { + // Get the first participant's handle (which is ename without @) + const firstParticipant = pictiqueChat.participants[0]; + const participantHandle = firstParticipant.handle || firstParticipant.id; + console.log(` 👤 First participant handle: ${participantHandle}`); + + // Find a test user whose ename (without @) matches the participant's handle + const participantTestUser = loadedUsers.find(u => { + const userHandle = u.ename.startsWith('@') ? u.ename.slice(1) : u.ename; + return userHandle === participantHandle; + }); + + if (participantTestUser) { + participantUser = TestSocialUserFactory.create(Platform.PICTIQUE, participantTestUser.ename); + console.log(` ✅ Using participant user: ${participantTestUser.ename}`); + } else { + console.log(` ⚠️ Could not find participant user for handle ${participantHandle}, using default user`); + } + } + + console.log(` 📥 Fetching messages from Pictique chat ${pictiqueChatId}...`); + const loadedMessagesFromPictique = await participantUser.getAllMessages(pictiqueChatId); + console.log(` 📥 Found ${loadedMessagesFromPictique.length} messages in Pictique chat`); + console.log(` 📋 Pictique messages:`, loadedMessagesFromPictique.map((m: any) => ({ + id: m.id, + text: m.text?.substring(0, 50) + '...', + senderId: m.sender?.id || m.senderId + }))); + + for (const message of messages) { + console.log(` 🔎 Looking for Blabsy message: "${message.text?.substring(0, 50)}..."`); + const match = loadedMessagesFromPictique.find((m: any) => + m.text === message.text + ); + if (match) { + console.log(` ✅ MATCH found! 
Pictique message ID: ${match.id}`); + pictiqueMessageSyncCounter++; + } else { + console.log(` ❌ NO MATCH - message not found in Pictique`); + failedSyncs.push({ type: 'message', id: message.id, text: message.text, platform: 'Blabsy', chatId: blabsyChatId }); + } + } + } + + if (failedSyncs.length > 0) { + console.log('\n❌ Failed to sync Blabsy -> Pictique:', JSON.stringify(failedSyncs, null, 2)); + } + expect(blabsyMessages.length).toEqual(pictiqueMessageSyncCounter); + }); + + test('[Messages] Pictique -> Blabsy', async () => { + if (pictiqueMessages.length === 0) { + return; + } + + const [user] = loadedUsers.map(u => TestSocialUserFactory.createForBothPlatforms(u.ename)); + const allBlabsyChats = await user.blabsy.getAllChats(); + + // For Pictique, fetch chats from ALL users since getUserChats only returns chats for the logged-in user + const pictiqueChatsSet = new Map(); + for (const testUser of loadedUsers) { + const pictiqueUser = TestSocialUserFactory.create(Platform.PICTIQUE, testUser.ename); + const userChats = await pictiqueUser.getAllChats(); + for (const chat of userChats) { + if (!pictiqueChatsSet.has(chat.id)) { + pictiqueChatsSet.set(chat.id, chat); + } + } + } + const allPictiqueChats = Array.from(pictiqueChatsSet.values()); + + // Build a map of Pictique chat ID -> Blabsy chat ID + const chatIdMap = new Map(); + for (const pictiqueChat of allPictiqueChats) { + const pictiqueParticipants = (pictiqueChat.participants || []).map((p: any) => { + const ename = p.ename || p.id || p; + return ename.startsWith('@') ? ename : `@${ename}`; + }).sort(); + + const syncedChat = allBlabsyChats.find((c: any) => { + const blabsyParticipants = (c.participants || []).map((p: string) => + p.startsWith('@') ? p : `@${p}` + ).sort(); + return JSON.stringify(pictiqueParticipants) === JSON.stringify(blabsyParticipants); + }); + + if (syncedChat) { + chatIdMap.set(pictiqueChat.id, syncedChat.id); + } + } + + const failedSyncs: any[] = []; + let blabsyMessageSyncCounter = 0; + + // Group messages by chat ID + const messagesByChat = new Map(); + for (const message of pictiqueMessages) { + const chatId = message.chatId; + if (!messagesByChat.has(chatId)) { + messagesByChat.set(chatId, []); + } + messagesByChat.get(chatId)!.push(message); + } + + // Check each chat's messages + for (const [pictiqueChatId, messages] of messagesByChat.entries()) { + const blabsyChatId = chatIdMap.get(pictiqueChatId); + if (!blabsyChatId) { + // Chat didn't sync, all messages failed + messages.forEach(msg => { + failedSyncs.push({ type: 'message', id: msg.id, text: msg.text, platform: 'Pictique', chatId: pictiqueChatId }); + }); + continue; + } + + const loadedMessagesFromBlabsy = await user.blabsy.getAllMessages(blabsyChatId); + + for (const message of messages) { + const match = loadedMessagesFromBlabsy.find((m: any) => + m.text === message.text + ); + if (match) { + blabsyMessageSyncCounter++; + } else { + failedSyncs.push({ type: 'message', id: message.id, text: message.text, platform: 'Pictique', chatId: pictiqueChatId }); + } + } + } + + if (failedSyncs.length > 0) { + console.log('\n❌ Failed to sync Pictique -> Blabsy:', JSON.stringify(failedSyncs, null, 2)); + } + expect(pictiqueMessages.length).toEqual(blabsyMessageSyncCounter); + }); + }, 300_000); + + afterAll(() => { + // Helper function to format status + const formatStatus = (expected: number, actual: number): string => { + if (expected === actual) { + return '✅'; + } + return '❌'; + }; + + // Create table + const table = new Table({ + head: ['Entity 
Type', 'Expected', 'Actual', 'Status'], + style: { + head: ['cyan', 'bold'], + border: ['gray'], + }, + colWidths: [30, 10, 10, 10], + }); + + // Posts + const postsBlabsyToPictique = syncSummary.posts.blabsy; + const postsPictiqueToBlabsy = syncSummary.posts.pictique; + const actualPostsBlabsyToPictique = actualSyncCounts.posts.blabsyToPictique; + const actualPostsPictiqueToBlabsy = actualSyncCounts.posts.pictiqueToBlabsy; + + table.push( + ['Posts: Blabsy → Pictique', postsBlabsyToPictique, actualPostsBlabsyToPictique, formatStatus(postsBlabsyToPictique, actualPostsBlabsyToPictique)], + ['Posts: Pictique → Blabsy', postsPictiqueToBlabsy, actualPostsPictiqueToBlabsy, formatStatus(postsPictiqueToBlabsy, actualPostsPictiqueToBlabsy)] + ); + + // Chats + const chatsBlabsyToPictique = syncSummary.chats.blabsy; + const chatsPictiqueToBlabsy = syncSummary.chats.pictique; + const actualChatsBlabsyToPictique = actualSyncCounts.chats.blabsyToPictique; + const actualChatsPictiqueToBlabsy = actualSyncCounts.chats.pictiqueToBlabsy; + + table.push( + ['Chats: Blabsy → Pictique', chatsBlabsyToPictique, actualChatsBlabsyToPictique, formatStatus(chatsBlabsyToPictique, actualChatsBlabsyToPictique)], + ['Chats: Pictique → Blabsy', chatsPictiqueToBlabsy, actualChatsPictiqueToBlabsy, formatStatus(chatsPictiqueToBlabsy, actualChatsPictiqueToBlabsy)] + ); + + // Comments + const commentsBlabsyToPictique = syncSummary.comments.blabsy; + const commentsPictiqueToBlabsy = syncSummary.comments.pictique; + const actualCommentsBlabsyToPictique = actualSyncCounts.comments.blabsyToPictique; + const actualCommentsPictiqueToBlabsy = actualSyncCounts.comments.pictiqueToBlabsy; + + table.push( + ['Comments: Blabsy → Pictique', commentsBlabsyToPictique, actualCommentsBlabsyToPictique, formatStatus(commentsBlabsyToPictique, actualCommentsBlabsyToPictique)], + ['Comments: Pictique → Blabsy', commentsPictiqueToBlabsy, actualCommentsPictiqueToBlabsy, formatStatus(commentsPictiqueToBlabsy, actualCommentsPictiqueToBlabsy)] + ); + + // Messages + const messagesBlabsyToPictique = syncSummary.messages.blabsy; + const messagesPictiqueToBlabsy = syncSummary.messages.pictique; + const actualMessagesBlabsyToPictique = actualSyncCounts.messages.blabsyToPictique; + const actualMessagesPictiqueToBlabsy = actualSyncCounts.messages.pictiqueToBlabsy; + + table.push( + ['Messages: Blabsy → Pictique', messagesBlabsyToPictique, actualMessagesBlabsyToPictique, formatStatus(messagesBlabsyToPictique, actualMessagesBlabsyToPictique)], + ['Messages: Pictique → Blabsy', messagesPictiqueToBlabsy, actualMessagesPictiqueToBlabsy, formatStatus(messagesPictiqueToBlabsy, actualMessagesPictiqueToBlabsy)] + ); + + // Summary statistics + const totalExpected = postsBlabsyToPictique + postsPictiqueToBlabsy + + chatsBlabsyToPictique + chatsPictiqueToBlabsy + + commentsBlabsyToPictique + commentsPictiqueToBlabsy + + messagesBlabsyToPictique + messagesPictiqueToBlabsy; + const totalActual = actualPostsBlabsyToPictique + actualPostsPictiqueToBlabsy + + actualChatsBlabsyToPictique + actualChatsPictiqueToBlabsy + + actualCommentsBlabsyToPictique + actualCommentsPictiqueToBlabsy + + actualMessagesBlabsyToPictique + actualMessagesPictiqueToBlabsy; + const successRate = totalExpected > 0 ? 
((totalActual / totalExpected) * 100).toFixed(1) : '0.0'; + + // Print summary + console.log('\n\n📊 SYNC VERIFICATION SUMMARY REPORT'); + console.log('═══════════════════════════════════════════════════════════════════════════════\n'); + console.log(table.toString()); + console.log(`\n📈 Overall: ${totalActual}/${totalExpected} synced (${successRate}% success rate)\n`); + }); +}); + diff --git a/tests/src/utils/api-client.ts b/tests/src/utils/api-client.ts new file mode 100644 index 00000000..7ef6996a --- /dev/null +++ b/tests/src/utils/api-client.ts @@ -0,0 +1,425 @@ +import axios, { AxiosInstance } from 'axios'; +import { config } from '../config/env'; +import jwt from 'jsonwebtoken'; +// eslint-disable-next-line @typescript-eslint/no-require-imports +const EventSource = require('eventsource'); + +let apiClient: AxiosInstance | null = null; + +// Token cache: map of ename -> token +const tokenCache = new Map(); + +/** + * Get or create axios instance for pictique-api + */ +export function getApiClient(): AxiosInstance { + if (!apiClient) { + apiClient = axios.create({ + baseURL: config.pictiqueBaseUri, + timeout: 30000, + headers: { + 'Content-Type': 'application/json', + }, + }); + } + return apiClient; +} + +/** + * Generate JWT token for a user ID (for testing purposes) + */ +export function generateToken(userId: string): string { + return jwt.sign({ userId }, config.jwtSecret!, { expiresIn: '7d' }); +} + +/** + * Get authentication token for a user using the proper auth flow: + * 1. Get offer from /api/auth/offer + * 2. Extract session ID from offer URI + * 3. Start SSE stream on /api/auth/ + * 4. POST to /api/auth with ename and session + * 5. Wait for token from SSE stream + * + * Tokens are cached per user (ename) to avoid re-authentication + */ +export async function getAuthToken(ename: string): Promise { + // Check cache first + if (tokenCache.has(ename)) { + return tokenCache.get(ename)!; + } + + const client = getApiClient(); + + // Step 1: Get offer + const offerResponse = await client.get('/api/auth/offer'); + const offerUri = offerResponse.data.uri; + + // Step 2: Extract session ID from URI + const uriObj = new URL(offerUri); + const sessionId = uriObj.searchParams.get('session'); + + if (!sessionId) { + throw new Error('Session ID not found in offer URI'); + } + + // Step 3: Set up SSE stream listener using axios with streaming + const tokenPromise = new Promise((resolve, reject) => { + const sseUrl = `${config.pictiqueBaseUri}/api/auth/sessions/${sessionId}`; + let postSent = false; + let streamClosed = false; + + const timeout = setTimeout(() => { + streamClosed = true; + reject(new Error('Authentication timeout: No token received from SSE stream')); + }, 30000); // 30 second timeout + + // Step 4: POST to /api/auth while SSE stream is active + const sendAuthPost = async () => { + if (postSent) return; + postSent = true; + + try { + // Don't await - fire and forget to avoid blocking + client.post('/api/auth', { + ename: ename, + session: sessionId, + }).catch((error: any) => { + // Don't reject here - let the SSE stream handle it + // The server might still emit the event even if POST fails + }); + } catch (error: any) { + // Don't reject - let SSE stream handle it + } + }; + + // Use EventSource library for SSE (designed for Node.js) + const eventSource = new EventSource(sseUrl, { + headers: { + 'Accept': 'text/event-stream', + 'Cache-Control': 'no-cache', + }, + }); + + eventSource.onopen = () => { + // Send POST as soon as connection opens + setTimeout(() => { + 
sendAuthPost(); + }, 100); + }; + + // Also try to send POST after a short delay as fallback + setTimeout(() => { + if (!postSent && !streamClosed) { + sendAuthPost(); + } + }, 500); + + eventSource.onmessage = (event: MessageEvent) => { + if (streamClosed) return; + + try { + const data = JSON.parse(event.data); + if (data.token) { + // Cache the token + tokenCache.set(ename, data.token); + streamClosed = true; + clearTimeout(timeout); + eventSource.close(); + resolve(data.token); + } + } catch (error) { + // Ignore parse errors + } + }; + + eventSource.onerror = (error: any) => { + if (streamClosed) return; + + // readyState: 0 = CONNECTING, 1 = OPEN, 2 = CLOSED + if (eventSource.readyState === EventSource.CLOSED) { + streamClosed = true; + clearTimeout(timeout); + eventSource.close(); + reject(new Error(`SSE connection closed: ${error?.message || 'Connection closed'}`)); + } + // If readyState is CONNECTING (0) or OPEN (1), continue waiting + }; + }); + + return tokenPromise; +} + +/** + * Get user by ID from pictique-api + */ +export async function getUser(userId: string, token: string): Promise { + const client = getApiClient(); + const response = await client.get(`/api/users/${userId}`, { + headers: { + Authorization: `Bearer ${token}`, + }, + }); + return response.data; +} + +/** + * Get current user from pictique-api + */ +export async function getCurrentUser(token: string): Promise { + const client = getApiClient(); + const response = await client.get('/api/users', { + headers: { + Authorization: `Bearer ${token}`, + }, + }); + return response.data; +} + +/** + * Search users by ename or name + */ +export async function searchUsers(query: string): Promise { + const client = getApiClient(); + const response = await client.get('/api/users/search/ename-name', { + params: { q: query }, + }); + return response.data || []; +} + +/** + * Update user profile + */ +export async function updateProfile( + userId: string, + data: { handle?: string; avatar?: string; name?: string }, + token: string +): Promise { + const client = getApiClient(); + const response = await client.patch( + '/api/users', + { + handle: data.handle, + avatar: data.avatar, + name: data.name, + }, + { + headers: { + Authorization: `Bearer ${token}`, + }, + } + ); + return response.data; +} + +/** + * Create a post + */ +export async function createPost( + data: { text: string; images?: string[]; hashtags?: string[] }, + token: string +): Promise { + const client = getApiClient(); + const response = await client.post( + '/api/posts', + { + text: data.text, + images: data.images, + hashtags: data.hashtags, + }, + { + headers: { + Authorization: `Bearer ${token}`, + }, + } + ); + return response.data; +} + +/** + * Toggle like on a post + */ +export async function toggleLike(postId: string, token: string): Promise { + const client = getApiClient(); + const response = await client.post( + `/api/posts/${postId}/like`, + {}, + { + headers: { + Authorization: `Bearer ${token}`, + }, + } + ); + return response.data; +} + +/** + * Create a comment + */ +export async function createComment( + postId: string, + text: string, + token: string +): Promise { + const client = getApiClient(); + const response = await client.post( + '/api/comments', + { + postId, + text, + }, + { + headers: { + Authorization: `Bearer ${token}`, + }, + } + ); + return response.data; +} + +/** + * Get comments for a post + */ +export async function getPostComments(postId: string, token: string): Promise { + const client = getApiClient(); + const response = 
await client.get(`/api/posts/${postId}/comments`, { + headers: { + Authorization: `Bearer ${token}`, + }, + }); + return response.data || []; +} + +/** + * Create a chat + */ +export async function createChat( + participantIds: string[], + name?: string, + token?: string +): Promise { + const client = getApiClient(); + const response = await client.post( + '/api/chats', + { + participantIds, + name, + }, + { + headers: { + Authorization: `Bearer ${token}`, + }, + } + ); + return response.data; +} + +/** + * Get user chats + */ +export async function getUserChats(token: string): Promise { + const client = getApiClient(); + const response = await client.get('/api/chats', { + headers: { + Authorization: `Bearer ${token}`, + }, + }); + return response.data || []; +} + +/** + * Get chat by ID + */ +export async function getChat(chatId: string, token: string): Promise { + const client = getApiClient(); + const response = await client.get(`/api/chats/${chatId}`, { + headers: { + Authorization: `Bearer ${token}`, + }, + }); + return response.data; +} + +/** + * Send a message in a chat + */ +export async function sendMessage( + chatId: string, + text: string, + token: string +): Promise { + const client = getApiClient(); + const response = await client.post( + `/api/chats/${chatId}/messages`, + { + text, + }, + { + headers: { + Authorization: `Bearer ${token}`, + }, + } + ); + return response.data; +} + +/** + * Get messages from a chat + */ +export async function getChatMessages(chatId: string, token: string): Promise { + const client = getApiClient(); + const response = await client.get(`/api/chats/${chatId}/messages`, { + headers: { + Authorization: `Bearer ${token}`, + }, + }); + return response.data || []; +} + +/** + * Get post by ID (via feed or search) + */ +export async function getPost(postId: string, token: string): Promise { + try { + // Try to get from feed (might not be available) + const client = getApiClient(); + const response = await client.get('/api/posts/feed', { + headers: { + Authorization: `Bearer ${token}`, + }, + params: { + limit: 100, + }, + }); + + const posts = response.data || []; + const post = posts.find((p: any) => p.id === postId); + return post || null; + } catch (error) { + return null; + } +} + +/** + * Retry helper for API calls + */ +export async function retryApiCall( + fn: () => Promise, + maxRetries: number = 3, + delayMs: number = 1000 +): Promise { + let lastError: Error | null = null; + + for (let i = 0; i < maxRetries; i++) { + try { + return await fn(); + } catch (error: any) { + lastError = error; + if (i < maxRetries - 1) { + await new Promise(resolve => setTimeout(resolve, delayMs * (i + 1))); + } + } + } + + throw lastError || new Error('API call failed after retries'); +} + diff --git a/tests/src/utils/data-comparator.ts b/tests/src/utils/data-comparator.ts new file mode 100644 index 00000000..53a18a52 --- /dev/null +++ b/tests/src/utils/data-comparator.ts @@ -0,0 +1,280 @@ +// This file contains comparison utilities for synced data +// Use TestSocialUser.getAllPosts(), getAllChats(), etc. 
to fetch data instead + +export interface PictiqueData { + posts: any[]; + comments: any[]; + likes: any[]; + chats: any[]; + messages: any[]; +} + +export interface BlabsyData { + tweets: any[]; + replies: any[]; + likes: any[]; + chats: any[]; + messages: any[]; +} + +export interface ComparisonResult { + postsMatch: boolean; + commentsMatch: boolean; + likesMatch: boolean; + chatsMatch: boolean; + messagesMatch: boolean; + errors: string[]; + details: { + pictiquePosts: number; + blabsyTweets: number; + matchedPosts: number; + pictiqueComments: number; + blabsyReplies: number; + matchedComments: number; + pictiqueLikes: number; + blabsyLikes: number; + matchedLikes: number; + pictiqueChats: number; + blabsyChats: number; + matchedChats: number; + pictiqueMessages: number; + blabsyMessages: number; + matchedMessages: number; + }; +} + +/** + * Compare posts by content (author ename + text) + */ +function comparePosts(pictiquePosts: any[], blabsyTweets: any[]): { matched: number; errors: string[] } { + const errors: string[] = []; + let matched = 0; + + for (const pictiquePost of pictiquePosts) { + const authorEname = pictiquePost.author?.ename || pictiquePost.author?.id; + const postText = pictiquePost.text; + + const matchingTweet = blabsyTweets.find((tweet: any) => { + const tweetAuthor = tweet.createdBy; + const tweetText = tweet.text; + + return authorEname === tweetAuthor && postText === tweetText; + }); + + if (matchingTweet) { + matched++; + } else { + errors.push(`Post not found in Blabsy: author="${authorEname}", text="${postText}"`); + } + } + + return { matched, errors }; +} + +/** + * Compare comments by content (author + text + parent) + */ +function compareComments(pictiqueComments: any[], blabsyReplies: any[]): { matched: number; errors: string[] } { + const errors: string[] = []; + let matched = 0; + + for (const pictiqueComment of pictiqueComments) { + const authorEname = pictiqueComment.author?.ename || pictiqueComment.author?.id; + const commentText = pictiqueComment.text; + const parentPostId = pictiqueComment.postId; + + // Find matching reply by author, text, and parent tweet + const matchingReply = blabsyReplies.find((reply: any) => { + const replyAuthor = reply.createdBy; + const replyText = reply.text; + const replyParentId = reply.parent?.id; + + return authorEname === replyAuthor && + commentText === replyText && + (replyParentId === parentPostId || true); // Parent ID might differ, so we'll match by content only for now + }); + + if (matchingReply) { + matched++; + } else { + errors.push(`Comment not found in Blabsy: author="${authorEname}", text="${commentText}"`); + } + } + + return { matched, errors }; +} + +/** + * Compare likes by user and post/tweet content + */ +function compareLikes(pictiqueLikes: any[], blabsyLikes: any[]): { matched: number; errors: string[] } { + const errors: string[] = []; + let matched = 0; + + for (const pictiqueLike of pictiqueLikes) { + const userId = pictiqueLike.userId; + const postText = pictiqueLike.postText; + + const matchingLike = blabsyLikes.find((blabsyLike: any) => { + return blabsyLike.userId === userId && blabsyLike.tweetText === postText; + }); + + if (matchingLike) { + matched++; + } else { + errors.push(`Like not found in Blabsy: user="${userId}", postText="${postText}"`); + } + } + + return { matched, errors }; +} + +/** + * Compare chats by participants (as sets, order doesn't matter) + */ +function compareChats(pictiqueChats: any[], blabsyChats: any[]): { matched: number; errors: string[] } { + const errors: 
string[] = []; + let matched = 0; + + for (const pictiqueChat of pictiqueChats) { + const pictiqueParticipants = new Set( + (pictiqueChat.participants || []).map((p: any) => p.id || p) + ); + + const matchingChat = blabsyChats.find((blabsyChat: any) => { + const blabsyParticipants = new Set(blabsyChat.participants || []); + + if (pictiqueParticipants.size !== blabsyParticipants.size) { + return false; + } + + for (const participant of pictiqueParticipants) { + if (!blabsyParticipants.has(participant)) { + return false; + } + } + + return true; + }); + + if (matchingChat) { + matched++; + } else { + errors.push(`Chat not found in Blabsy: participants=[${Array.from(pictiqueParticipants).join(', ')}]`); + } + } + + return { matched, errors }; +} + +/** + * Compare messages by sender, text, and chat participants + */ +function compareMessages(pictiqueMessages: any[], blabsyMessages: any[]): { matched: number; errors: string[] } { + const errors: string[] = []; + let matched = 0; + + for (const pictiqueMessage of pictiqueMessages) { + const senderId = pictiqueMessage.sender?.id || pictiqueMessage.sender?.ename || pictiqueMessage.senderId; + const messageText = pictiqueMessage.text; + const chatParticipants = new Set(pictiqueMessage.chatParticipants || []); + + const matchingMessage = blabsyMessages.find((blabsyMessage: any) => { + const blabsySender = blabsyMessage.senderId; + const blabsyText = blabsyMessage.text; + const blabsyChatParticipants = new Set(blabsyMessage.chatParticipants || []); + + if (senderId !== blabsySender || messageText !== blabsyText) { + return false; + } + + if (chatParticipants.size !== blabsyChatParticipants.size) { + return false; + } + + for (const participant of chatParticipants) { + if (!blabsyChatParticipants.has(participant)) { + return false; + } + } + + return true; + }); + + if (matchingMessage) { + matched++; + } else { + errors.push(`Message not found in Blabsy: sender="${senderId}", text="${messageText}"`); + } + } + + return { matched, errors }; +} + +/** + * Compare all data from both platforms + */ +export async function compareAllData( + pictiqueData: PictiqueData, + blabsyData: BlabsyData +): Promise { + const errors: string[] = []; + + // Compare posts (Pictique -> Blabsy) + const postsComparison = comparePosts(pictiqueData.posts, blabsyData.tweets); + // Counts must match and all Pictique posts must have matches + const postsMatch = postsComparison.matched === pictiqueData.posts.length && + blabsyData.tweets.length === pictiqueData.posts.length; + errors.push(...postsComparison.errors); + + // Compare comments + const commentsComparison = compareComments(pictiqueData.comments, blabsyData.replies); + const commentsMatch = commentsComparison.matched === pictiqueData.comments.length && + blabsyData.replies.length === pictiqueData.comments.length; + errors.push(...commentsComparison.errors); + + // Compare likes + const likesComparison = compareLikes(pictiqueData.likes, blabsyData.likes); + const likesMatch = likesComparison.matched === pictiqueData.likes.length && + blabsyData.likes.length === pictiqueData.likes.length; + errors.push(...likesComparison.errors); + + // Compare chats + const chatsComparison = compareChats(pictiqueData.chats, blabsyData.chats); + const chatsMatch = chatsComparison.matched === pictiqueData.chats.length && + blabsyData.chats.length === pictiqueData.chats.length; + errors.push(...chatsComparison.errors); + + // Compare messages + const messagesComparison = compareMessages(pictiqueData.messages, blabsyData.messages); + const 
messagesMatch = messagesComparison.matched === pictiqueData.messages.length && + blabsyData.messages.length === pictiqueData.messages.length; + errors.push(...messagesComparison.errors); + + return { + postsMatch, + commentsMatch, + likesMatch, + chatsMatch, + messagesMatch, + errors, + details: { + pictiquePosts: pictiqueData.posts.length, + blabsyTweets: blabsyData.tweets.length, + matchedPosts: postsComparison.matched, + pictiqueComments: pictiqueData.comments.length, + blabsyReplies: blabsyData.replies.length, + matchedComments: commentsComparison.matched, + pictiqueLikes: pictiqueData.likes.length, + blabsyLikes: blabsyData.likes.length, + matchedLikes: likesComparison.matched, + pictiqueChats: pictiqueData.chats.length, + blabsyChats: blabsyData.chats.length, + matchedChats: chatsComparison.matched, + pictiqueMessages: pictiqueData.messages.length, + blabsyMessages: blabsyData.messages.length, + matchedMessages: messagesComparison.matched, + }, + }; +} + diff --git a/tests/src/utils/firebase-client.ts b/tests/src/utils/firebase-client.ts new file mode 100644 index 00000000..94f23423 --- /dev/null +++ b/tests/src/utils/firebase-client.ts @@ -0,0 +1,330 @@ +import { getFirestore, Timestamp, FieldValue } from 'firebase-admin/firestore'; +import { initializeFirebase } from './user-factory'; + +let firestore: FirebaseFirestore.Firestore | null = null; + +function getFirestoreInstance(): FirebaseFirestore.Firestore { + if (!firestore) { + initializeFirebase(); + firestore = getFirestore(); + if (!firestore) { + throw new Error('Failed to initialize Firestore'); + } + } + return firestore; +} + +export interface EditableUserData { + name?: string; + bio?: string | null; + location?: string | null; + website?: string | null; + photoURL?: string | null; + coverPhotoURL?: string | null; +} + +/** + * Update user data in Firestore (mimics frontend updateUserData) + */ +export async function updateUserData( + userId: string, + userData: EditableUserData +): Promise { + const db = getFirestoreInstance(); + const userRef = db.collection('users').doc(userId); + await userRef.update({ + ...userData, + updatedAt: FieldValue.serverTimestamp(), + }); +} + +/** + * Create a tweet in Firestore + */ +export async function createTweet( + userId: string, + text: string | null, + images: any[] | null = null, + parentId?: string +): Promise { + const db = getFirestoreInstance(); + const tweetRef = db.collection('tweets').doc(); + + let parent = null; + if (parentId) { + const parentTweet = await db.collection('tweets').doc(parentId).get(); + if (parentTweet.exists) { + const parentData = parentTweet.data(); + parent = { + id: parentId, + username: parentData?.username || 'unknown', + }; + } + } + + const tweetData = { + id: tweetRef.id, + text: text || null, + images: images || null, + parent: parent, + userLikes: [], + createdBy: userId, + createdAt: FieldValue.serverTimestamp(), + updatedAt: FieldValue.serverTimestamp(), + userReplies: 0, + userRetweets: [], + }; + + await tweetRef.set(tweetData); + + // Increment total tweets + const userRef = db.collection('users').doc(userId); + await userRef.update({ + totalTweets: FieldValue.increment(1), + updatedAt: FieldValue.serverTimestamp(), + }); + + return tweetRef.id; +} + +/** + * Toggle like on a tweet + */ +export async function toggleLike( + userId: string, + tweetId: string, + isLiked: boolean +): Promise { + const db = getFirestoreInstance(); + const tweetRef = db.collection('tweets').doc(tweetId); + const userStatsRef = 
db.collection('users').doc(userId).collection('stats').doc('stats'); + + // Ensure stats document exists + const statsDoc = await userStatsRef.get(); + if (!statsDoc.exists) { + await userStatsRef.set({ + likes: [], + tweets: [], + updatedAt: FieldValue.serverTimestamp(), + }); + } + + if (isLiked) { + // Like the tweet + await tweetRef.update({ + userLikes: FieldValue.arrayUnion(userId), + updatedAt: FieldValue.serverTimestamp(), + }); + await userStatsRef.set({ + likes: FieldValue.arrayUnion(tweetId), + updatedAt: FieldValue.serverTimestamp(), + }, { merge: true }); + } else { + // Unlike the tweet + await tweetRef.update({ + userLikes: FieldValue.arrayRemove(userId), + updatedAt: FieldValue.serverTimestamp(), + }); + await userStatsRef.set({ + likes: FieldValue.arrayRemove(tweetId), + updatedAt: FieldValue.serverTimestamp(), + }, { merge: true }); + } +} + +/** + * Create a reply (comment) to a tweet + */ +export async function createReply( + userId: string, + parentTweetId: string, + text: string +): Promise { + const db = getFirestoreInstance(); + + // Get parent tweet to extract username + const parentTweet = await db.collection('tweets').doc(parentTweetId).get(); + if (!parentTweet.exists) { + throw new Error('Parent tweet not found'); + } + + const parentData = parentTweet.data(); + const parentUser = await db.collection('users').doc(parentData?.createdBy).get(); + const parentUserData = parentUser.data(); + + const replyRef = db.collection('tweets').doc(); + const replyData = { + id: replyRef.id, + text: text, + images: null, + parent: { + id: parentTweetId, + username: parentUserData?.username || 'unknown', + }, + userLikes: [], + createdBy: userId, + createdAt: FieldValue.serverTimestamp(), + updatedAt: FieldValue.serverTimestamp(), + userReplies: 0, + userRetweets: [], + }; + + await replyRef.set(replyData); + + // Increment reply count on parent + await db.collection('tweets').doc(parentTweetId).update({ + userReplies: FieldValue.increment(1), + updatedAt: FieldValue.serverTimestamp(), + }); + + // Increment total tweets for user + await db.collection('users').doc(userId).update({ + totalTweets: FieldValue.increment(1), + updatedAt: FieldValue.serverTimestamp(), + }); + + return replyRef.id; +} + +/** + * Create a chat + */ +export async function createChat( + participants: string[], + name?: string, + owner?: string, + description?: string +): Promise { + const db = getFirestoreInstance(); + const chatsCollection = db.collection('chats'); + + // Check for existing DM (2 participants, no name) before creating + const isDM = participants.length === 2 && !name; + + if (isDM) { + // Check if a direct chat already exists between these users + const existingChatsQuery = chatsCollection.where('participants', 'array-contains', participants[0]); + const existingChatsSnapshot = await existingChatsQuery.get(); + + for (const doc of existingChatsSnapshot.docs) { + const chat = doc.data(); + // Check if it's a direct chat (2 participants) with same participants + if ( + chat.participants && + Array.isArray(chat.participants) && + chat.participants.length === 2 && + chat.participants.includes(participants[0]) && + chat.participants.includes(participants[1]) + ) { + return doc.id; // Return existing chat ID + } + } + } + + // No existing DM found or it's a group chat - create new + const chatRef = chatsCollection.doc(); + const isGroup = participants.length > 2; + + const chatData = { + id: chatRef.id, + participants: participants, + ...(name && { name }), + ...(owner && { owner }), + 
...(description && { description }), + admins: isGroup ? (owner ? [owner] : []) : [], + createdAt: FieldValue.serverTimestamp(), + updatedAt: FieldValue.serverTimestamp(), + }; + + await chatRef.set(chatData); + return chatRef.id; +} + +/** + * Send a message in a chat + */ +export async function sendMessage( + chatId: string, + senderId: string, + text: string +): Promise { + const db = getFirestoreInstance(); + const messageId = db.collection('chats').doc().id; // Generate ID + const messageRef = db.collection('chats').doc(chatId).collection('messages').doc(messageId); + const chatRef = db.collection('chats').doc(chatId); + + const messageData = { + id: messageId, + chatId: chatId, + senderId: senderId, + text: text, + createdAt: FieldValue.serverTimestamp(), + updatedAt: FieldValue.serverTimestamp(), + readBy: [senderId], + }; + + await messageRef.set(messageData); + await chatRef.update({ + lastMessage: { + text: text, + senderId: senderId, + timestamp: FieldValue.serverTimestamp(), + }, + updatedAt: FieldValue.serverTimestamp(), + }); + + return messageId; +} + +/** + * Get user document from Firestore + */ +export async function getUser(userId: string): Promise { + const db = getFirestoreInstance(); + const userDoc = await db.collection('users').doc(userId).get(); + if (!userDoc.exists) { + return null; + } + return userDoc.data(); +} + +/** + * Get tweet document from Firestore + */ +export async function getTweet(tweetId: string): Promise { + const db = getFirestoreInstance(); + const tweetDoc = await db.collection('tweets').doc(tweetId).get(); + if (!tweetDoc.exists) { + return null; + } + return tweetDoc.data(); +} + +/** + * Get chat document from Firestore + */ +export async function getChat(chatId: string): Promise { + const db = getFirestoreInstance(); + const chatDoc = await db.collection('chats').doc(chatId).get(); + if (!chatDoc.exists) { + return null; + } + return chatDoc.data(); +} + +/** + * Get messages from a chat + */ +export async function getChatMessages(chatId: string, limit: number = 50): Promise { + const db = getFirestoreInstance(); + const messagesSnapshot = await db + .collection('chats') + .doc(chatId) + .collection('messages') + .orderBy('createdAt', 'desc') + .limit(limit) + .get(); + + return messagesSnapshot.docs.map(doc => doc.data()); +} + diff --git a/tests/src/utils/sync-verifier.ts b/tests/src/utils/sync-verifier.ts new file mode 100644 index 00000000..b1800357 --- /dev/null +++ b/tests/src/utils/sync-verifier.ts @@ -0,0 +1,607 @@ +import { getUser as getFirebaseUser, getTweet, getChat, getChatMessages } from './firebase-client'; +import { + getUser as getApiUser, + searchUsers, + getPost, + getChatMessages as getApiChatMessages, + getChat as getApiChat, + getPostComments, + getApiClient, +} from './api-client'; +import { config } from '../config/env'; +import { generateToken } from './api-client'; +import axios from 'axios'; + +/** + * Wait for sync to complete + */ +export async function waitForSync(waitMs: number = config.syncBufferTime): Promise { + await new Promise(resolve => setTimeout(resolve, waitMs)); +} + +/** + * Check if enough time has passed since last update (prevention mechanism) + */ +export function checkPreventionWindow(lastUpdateTime: number): boolean { + const timeSinceUpdate = Date.now() - lastUpdateTime; + return timeSinceUpdate >= config.preventionWindow; +} + +/** + * Verify user sync from blabsy to pictique + */ +export async function verifyUserSyncBlabsyToPictique( + ename: string, + expectedData: { + name?: string; + 
bio?: string | null; + location?: string | null; + website?: string | null; + photoURL?: string | null; + coverPhotoURL?: string | null; + username?: string; + } +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + // Wait for sync + await waitForSync(); + + // Find user in pictique by ename + const pictiqueUsers = await searchUsers(ename); + const pictiqueUser = pictiqueUsers.find((u: any) => u.ename === ename); + + if (!pictiqueUser) { + errors.push(`User ${ename} not found in pictique`); + return { success: false, errors }; + } + + // Verify field mappings + if (expectedData.name && pictiqueUser.name !== expectedData.name) { + errors.push(`Name mismatch: expected "${expectedData.name}", got "${pictiqueUser.name}"`); + } + + if (expectedData.bio !== undefined) { + const expectedBio = expectedData.bio || null; + const actualBio = pictiqueUser.description || null; + if (actualBio !== expectedBio) { + errors.push(`Bio mismatch: expected "${expectedBio}", got "${actualBio}"`); + } + } + + if (expectedData.username && pictiqueUser.handle !== expectedData.username) { + errors.push(`Handle mismatch: expected "${expectedData.username}", got "${pictiqueUser.handle}"`); + } + + if (expectedData.photoURL && pictiqueUser.avatarUrl !== expectedData.photoURL) { + errors.push(`Avatar mismatch: expected "${expectedData.photoURL}", got "${pictiqueUser.avatarUrl}"`); + } + + if (expectedData.coverPhotoURL !== undefined) { + const expectedBanner = expectedData.coverPhotoURL || null; + const actualBanner = pictiqueUser.bannerUrl || null; + if (actualBanner !== expectedBanner) { + errors.push(`Banner mismatch: expected "${expectedBanner}", got "${actualBanner}"`); + } + } + + return { + success: errors.length === 0, + errors, + }; + } catch (error: any) { + errors.push(`Error verifying user sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify user sync from pictique to blabsy + */ +export async function verifyUserSyncPictiqueToBlabsy( + ename: string, + expectedData: { + name?: string; + description?: string | null; + handle?: string; + avatarUrl?: string | null; + bannerUrl?: string | null; + } +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + // Wait for sync + await waitForSync(); + + // Get user from blabsy Firestore + const blabsyUser = await getFirebaseUser(ename); + + if (!blabsyUser) { + errors.push(`User ${ename} not found in blabsy`); + return { success: false, errors }; + } + + // Verify field mappings + if (expectedData.name && blabsyUser.name !== expectedData.name) { + errors.push(`Name mismatch: expected "${expectedData.name}", got "${blabsyUser.name}"`); + } + + if (expectedData.description !== undefined) { + const expectedBio = expectedData.description || null; + const actualBio = blabsyUser.bio || null; + if (actualBio !== expectedBio) { + errors.push(`Bio mismatch: expected "${expectedBio}", got "${actualBio}"`); + } + } + + if (expectedData.handle && blabsyUser.username !== expectedData.handle) { + errors.push(`Username mismatch: expected "${expectedData.handle}", got "${blabsyUser.username}"`); + } + + if (expectedData.avatarUrl !== undefined) { + const expectedPhoto = expectedData.avatarUrl || null; + const actualPhoto = blabsyUser.photoURL || null; + if (actualPhoto !== expectedPhoto) { + errors.push(`Photo mismatch: expected "${expectedPhoto}", got "${actualPhoto}"`); + } + } + + if (expectedData.bannerUrl !== undefined) { + const expectedCover = 
expectedData.bannerUrl || null; + const actualCover = blabsyUser.coverPhotoURL || null; + if (actualCover !== expectedCover) { + errors.push(`Cover mismatch: expected "${expectedCover}", got "${actualCover}"`); + } + } + + return { + success: errors.length === 0, + errors, + }; + } catch (error: any) { + errors.push(`Error verifying user sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify post/tweet sync from pictique to blabsy + * Searches by author and text content since IDs differ between platforms + */ +export async function verifyPostSyncPictiqueToBlabsy( + postId: string, + expectedData: { + text: string; + authorId: string; + } +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + await waitForSync(); + + // Search for tweet by author and text content (IDs differ between platforms) + // Get user's tweets from Firestore and find matching one + const { getFirestore } = require('firebase-admin/firestore'); + const { initializeFirebase } = require('./user-factory'); + initializeFirebase(); + const db = getFirestore(); + + // Query tweets by author (ename) and text + const tweetsSnapshot = await db + .collection('tweets') + .where('createdBy', '==', expectedData.authorId) + .where('text', '==', expectedData.text) + .limit(10) + .get(); + + if (tweetsSnapshot.empty) { + errors.push(`Tweet not found in blabsy: text="${expectedData.text}", author="${expectedData.authorId}"`); + return { success: false, errors }; + } + + return { + success: true, + errors: [], + }; + } catch (error: any) { + errors.push(`Error verifying post sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify post/tweet sync from blabsy to pictique + * Searches by author and text content since IDs differ between platforms + */ +export async function verifyPostSyncBlabsyToPictique( + tweetId: string, + expectedData: { + text: string | null; + createdBy: string; + }, + token: string +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + await waitForSync(); + + // Search for post by author and text content (IDs differ between platforms) + const client = getApiClient(); + const response = await client.get('/api/posts/feed', { + headers: { + Authorization: `Bearer ${token}`, + }, + params: { + limit: 100, + }, + }); + + const posts = response.data || []; + + // Find post by matching author ename and text content + const post = posts.find((p: any) => { + const authorMatches = p.author?.ename === expectedData.createdBy || p.author?.id === expectedData.createdBy; + const textMatches = p.text === expectedData.text; + return authorMatches && textMatches; + }); + + if (!post) { + errors.push(`Post not found in pictique: text="${expectedData.text}", author="${expectedData.createdBy}"`); + return { success: false, errors }; + } + + return { + success: true, + errors: [], + }; + } catch (error: any) { + errors.push(`Error verifying post sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify comment sync from pictique to blabsy + */ +export async function verifyCommentSyncPictiqueToBlabsy( + commentId: string, + expectedData: { + text: string; + postId: string; + } +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + await waitForSync(); + + // Comments in blabsy are replies (tweets with parent) + const reply = await getTweet(commentId); + + if (!reply) { + errors.push(`Reply ${commentId} not found in 
blabsy`); + return { success: false, errors }; + } + + if (reply.text !== expectedData.text) { + errors.push(`Text mismatch: expected "${expectedData.text}", got "${reply.text}"`); + } + + if (reply.parent?.id !== expectedData.postId) { + errors.push(`Parent mismatch: expected "${expectedData.postId}", got "${reply.parent?.id}"`); + } + + return { + success: errors.length === 0, + errors, + }; + } catch (error: any) { + errors.push(`Error verifying comment sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify comment sync from blabsy to pictique + */ +export async function verifyCommentSyncBlabsyToPictique( + replyId: string, + expectedData: { + text: string; + parentTweetId: string; + }, + token: string +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + await waitForSync(); + + // Get comments for the parent post + const comments = await getPostComments(expectedData.parentTweetId, token); + const comment = comments.find((c: any) => c.id === replyId); + + if (!comment) { + errors.push(`Comment ${replyId} not found in pictique`); + return { success: false, errors }; + } + + if (comment.text !== expectedData.text) { + errors.push(`Text mismatch: expected "${expectedData.text}", got "${comment.text}"`); + } + + return { + success: errors.length === 0, + errors, + }; + } catch (error: any) { + errors.push(`Error verifying comment sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify like sync from pictique to blabsy + */ +export async function verifyLikeSyncPictiqueToBlabsy( + postId: string, + userId: string, + isLiked: boolean +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + await waitForSync(); + + const tweet = await getTweet(postId); + + if (!tweet) { + errors.push(`Tweet ${postId} not found in blabsy`); + return { success: false, errors }; + } + + const hasLike = tweet.userLikes?.includes(userId) || false; + + if (hasLike !== isLiked) { + errors.push(`Like status mismatch: expected ${isLiked}, got ${hasLike}`); + } + + return { + success: errors.length === 0, + errors, + }; + } catch (error: any) { + errors.push(`Error verifying like sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify like sync from blabsy to pictique + */ +export async function verifyLikeSyncBlabsyToPictique( + tweetId: string, + userId: string, + isLiked: boolean, + token: string +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + await waitForSync(); + + const post = await getPost(tweetId, token); + + if (!post) { + errors.push(`Post ${tweetId} not found in pictique`); + return { success: false, errors }; + } + + const hasLike = post.likedBy?.some((u: any) => u.id === userId) || false; + + if (hasLike !== isLiked) { + errors.push(`Like status mismatch: expected ${isLiked}, got ${hasLike}`); + } + + return { + success: errors.length === 0, + errors, + }; + } catch (error: any) { + errors.push(`Error verifying like sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify message sync from pictique to blabsy + */ +export async function verifyMessageSyncPictiqueToBlabsy( + messageId: string, + chatId: string, + expectedData: { + text: string; + senderId: string; + } +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + await waitForSync(); + + const messages = await getChatMessages(chatId); + const message = 
messages.find((m: any) => m.id === messageId); + + if (!message) { + errors.push(`Message ${messageId} not found in blabsy`); + return { success: false, errors }; + } + + if (message.text !== expectedData.text) { + errors.push(`Text mismatch: expected "${expectedData.text}", got "${message.text}"`); + } + + if (message.senderId !== expectedData.senderId) { + errors.push(`Sender mismatch: expected "${expectedData.senderId}", got "${message.senderId}"`); + } + + return { + success: errors.length === 0, + errors, + }; + } catch (error: any) { + errors.push(`Error verifying message sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify message sync from blabsy to pictique + */ +export async function verifyMessageSyncBlabsyToPictique( + messageId: string, + chatId: string, + expectedData: { + text: string; + senderId: string; + }, + token: string +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + await waitForSync(); + + const messages = await getApiChatMessages(chatId, token); + const message = messages.find((m: any) => m.id === messageId); + + if (!message) { + errors.push(`Message ${messageId} not found in pictique`); + return { success: false, errors }; + } + + if (message.text !== expectedData.text) { + errors.push(`Text mismatch: expected "${expectedData.text}", got "${message.text}"`); + } + + if (message.sender?.id !== expectedData.senderId) { + errors.push(`Sender mismatch: expected "${expectedData.senderId}", got "${message.sender?.id}"`); + } + + return { + success: errors.length === 0, + errors, + }; + } catch (error: any) { + errors.push(`Error verifying message sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify chat sync from pictique to blabsy + */ +export async function verifyChatSyncPictiqueToBlabsy( + chatId: string, + expectedData: { + participants: string[]; + name?: string; + } +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + await waitForSync(); + + const chat = await getChat(chatId); + + if (!chat) { + errors.push(`Chat ${chatId} not found in blabsy`); + return { success: false, errors }; + } + + // Check participants (order might differ, so check as sets) + const expectedParticipants = new Set(expectedData.participants); + const actualParticipants = new Set(chat.participants || []); + + if (expectedParticipants.size !== actualParticipants.size) { + errors.push(`Participant count mismatch: expected ${expectedParticipants.size}, got ${actualParticipants.size}`); + } + + for (const participant of expectedParticipants) { + if (!actualParticipants.has(participant)) { + errors.push(`Missing participant: ${participant}`); + } + } + + if (expectedData.name && chat.name !== expectedData.name) { + errors.push(`Name mismatch: expected "${expectedData.name}", got "${chat.name}"`); + } + + return { + success: errors.length === 0, + errors, + }; + } catch (error: any) { + errors.push(`Error verifying chat sync: ${error.message}`); + return { success: false, errors }; + } +} + +/** + * Verify chat sync from blabsy to pictique + */ +export async function verifyChatSyncBlabsyToPictique( + chatId: string, + expectedData: { + participants: string[]; + name?: string; + }, + token: string +): Promise<{ success: boolean; errors: string[] }> { + const errors: string[] = []; + + try { + await waitForSync(); + + const chat = await getApiChat(chatId, token); + + if (!chat) { + errors.push(`Chat ${chatId} not found in pictique`); + 
return { success: false, errors }; + } + + // Check participants + const expectedParticipants = new Set(expectedData.participants); + const actualParticipants = new Set( + chat.participants?.map((p: any) => p.id || p) || [] + ); + + if (expectedParticipants.size !== actualParticipants.size) { + errors.push(`Participant count mismatch: expected ${expectedParticipants.size}, got ${actualParticipants.size}`); + } + + for (const participant of expectedParticipants) { + if (!actualParticipants.has(participant)) { + errors.push(`Missing participant: ${participant}`); + } + } + + if (expectedData.name && chat.name !== expectedData.name) { + errors.push(`Name mismatch: expected "${expectedData.name}", got "${chat.name}"`); + } + + return { + success: errors.length === 0, + errors, + }; + } catch (error: any) { + errors.push(`Error verifying chat sync: ${error.message}`); + return { success: false, errors }; + } +} + diff --git a/tests/src/utils/user-cache.ts b/tests/src/utils/user-cache.ts new file mode 100644 index 00000000..e9da0e3b --- /dev/null +++ b/tests/src/utils/user-cache.ts @@ -0,0 +1,96 @@ +import { TestUser } from './user-factory'; +import fs from 'fs'; +import path from 'path'; + +const CACHE_FILE = path.join(__dirname, '../../.test-users-cache.json'); + +export interface CachedUsers { + users: TestUser[]; + createdAt: string; + userCount: number; +} + +/** + * Load cached users from disk + */ +export function loadCachedUsers(): CachedUsers | null { + try { + if (fs.existsSync(CACHE_FILE)) { + const cacheData = fs.readFileSync(CACHE_FILE, 'utf-8'); + const cached: CachedUsers = JSON.parse(cacheData); + + // Validate cache structure + if (cached.users && Array.isArray(cached.users) && cached.users.length > 0) { + return cached; + } + } + } catch (error) { + console.warn('Failed to load user cache:', error); + } + + return null; +} + +/** + * Save users to cache + */ +export function saveCachedUsers(users: TestUser[]): void { + try { + const cacheData: CachedUsers = { + users, + createdAt: new Date().toISOString(), + userCount: users.length, + }; + + fs.writeFileSync(CACHE_FILE, JSON.stringify(cacheData, null, 2), 'utf-8'); + console.log(`Cached ${users.length} test users to ${CACHE_FILE}`); + } catch (error) { + console.warn('Failed to save user cache:', error); + } +} + +/** + * Clear user cache + */ +export function clearUserCache(): void { + try { + if (fs.existsSync(CACHE_FILE)) { + fs.unlinkSync(CACHE_FILE); + console.log('User cache cleared'); + } + } catch (error) { + console.warn('Failed to clear user cache:', error); + } +} + +/** + * Check if cached users match the requested count + */ +export function isCacheValid(requestedCount: number): boolean { + const cached = loadCachedUsers(); + if (!cached) { + return false; + } + + // Cache is valid if it has at least the requested number of users + return cached.users.length >= requestedCount; +} + +/** + * Get cached users (up to requested count) + */ +export function getCachedUsers(requestedCount: number): TestUser[] | null { + const cached = loadCachedUsers(); + if (!cached) { + return null; + } + + if (cached.users.length >= requestedCount) { + console.log(`Cache hit: Found ${cached.users.length} cached users, using ${requestedCount}`); + return cached.users.slice(0, requestedCount); + } + + console.log(`Cache miss: Cached users (${cached.users.length}) < requested (${requestedCount})`); + return null; +} + diff --git a/tests/src/utils/user-factory.ts b/tests/src/utils/user-factory.ts new file mode 100644 index 
00000000..c9821f81 --- /dev/null +++ b/tests/src/utils/user-factory.ts @@ -0,0 +1,285 @@ +import { initializeApp, cert, getApps, App } from 'firebase-admin/app'; +import { getAuth } from 'firebase-admin/auth'; +import { getFirestore, Timestamp } from 'firebase-admin/firestore'; +import { config } from '../config/env'; +import path from 'path'; +import fs from 'fs'; +import { v4 as uuidv4 } from 'uuid'; +import * as falso from '@ngneat/falso'; +import axios from 'axios'; + +let firebaseApp: App | null = null; +let firestore: any = null; + +export interface TestUser { + id: string; + ename: string; + email: string; + username: string; + name: string; + firebaseUid: string; +} + +/** + * Initialize Firebase Admin SDK + */ +export function initializeFirebase(): void { + if (getApps().length > 0) { + firebaseApp = getApps()[0]; + firestore = getFirestore(firebaseApp); + return; + } + + let credential; + + if (config.googleApplicationCredentials) { + // Resolve path relative to project root (where .env file is) + // When running from staging-load-tests directory, go up one level to project root + const credentialsPath = path.resolve(process.cwd(), '..', config.googleApplicationCredentials); + + // Set environment variable for Firebase Admin SDK + process.env.GOOGLE_APPLICATION_CREDENTIALS = credentialsPath; + + if (fs.existsSync(credentialsPath)) { + // eslint-disable-next-line @typescript-eslint/no-require-imports + const serviceAccount = require(credentialsPath); + credential = cert(serviceAccount); + } else { + throw new Error(`Firebase credentials file not found at: ${credentialsPath}`); + } + } else { + // Try using application default credentials (for environments like GCP) + credential = undefined; + } + + firebaseApp = initializeApp({ + credential: credential, + projectId: config.firebaseProjectId, + }); + + firestore = getFirestore(firebaseApp); +} + +/** + * Provision an eName using evault-core + */ +async function provisionEName(): Promise { + if (!config.registryUrl) { + throw new Error('PUBLIC_REGISTRY_URL is not set in environment variables'); + } + if (!config.provisionerUrl) { + throw new Error('PUBLIC_PROVISIONER_URL is not set in environment variables'); + } + + // Step 1: Get entropy token from registry + const entropyResponse = await axios.get(`${config.registryUrl}/entropy`); + if (!entropyResponse.data?.token) { + throw new Error('Failed to get entropy token from registry'); + } + const registryEntropy = entropyResponse.data.token; + + // Step 2: Generate random namespace (UUID) + const namespace = uuidv4(); + + // Step 3: Provision eName via evault-core + const provisionResponse = await axios.post( + `${config.provisionerUrl}/provision`, + { + registryEntropy, + namespace, + verificationId: config.demoCodeW3DS, + publicKey: '0x0000000000000000000000000000000000000000', // Dummy public key for testing + } + ); + + if (!provisionResponse.data?.success || !provisionResponse.data?.w3id) { + throw new Error( + `Failed to provision eName: ${provisionResponse.data?.error || provisionResponse.data?.message || 'Unknown error'}` + ); + } + + // Return the w3id (eName) - it should already be in @ format + const w3id = provisionResponse.data.w3id; + // Ensure it starts with @ + return w3id.startsWith('@') ? 
w3id : `@${w3id}`; +} + +/** + * Create a test user in Firebase + * Users created in Firebase will automatically sync to pictique + * eName is provisioned via evault-core before creating the Firebase user + */ +export async function createTestUser(index: number): Promise { + if (!firestore) { + initializeFirebase(); + } + if (!firestore) { + throw new Error('Failed to initialize Firestore'); + } + + const auth = getAuth(firebaseApp!); + + // Provision eName via evault-core + const ename = await provisionEName(); + + // Username should be ename without the @ prefix + const username = ename.startsWith('@') ? ename.slice(1) : ename; + + // Make email unique with UUID to avoid conflicts from previous test runs + const email = `${falso.randEmail({ domain: 'staging-load-test.local' })}-${username}`; + const name = `${falso.randFirstName()} ${falso.randLastName()}`; + + // Create user in Firebase Auth + const userRecord = await auth.createUser({ + email, + displayName: name, + uid: ename, // Use ename as UID + }); + + // Create user document in Firestore + const userRef = firestore.collection('users').doc(ename); + await userRef.set({ + id: ename, + ename: ename, + name: name, + username: username, + bio: falso.randBoolean() ? falso.randSentence() : null, + photoURL: '/assets/twitter-avatar.jpg', + coverPhotoURL: falso.randBoolean() ? falso.randUrl() : null, + verified: falso.randBoolean({ probability: 0.1 }), // 10% chance of being verified + following: [], + followers: [], + createdAt: Timestamp.now(), + updatedAt: Timestamp.now(), + totalTweets: 0, + totalPhotos: 0, + pinnedTweet: null, + theme: falso.randBoolean() ? falso.rand(['light', 'dark', 'dim']) : null, + accent: falso.randBoolean() ? falso.rand(['blue', 'yellow', 'pink', 'purple', 'orange', 'green']) : null, + website: falso.randBoolean() ? falso.randUrl() : null, + location: falso.randBoolean() ? 
`${falso.randCity()}, ${falso.randCountry()}` : null, + }); + + return { + id: ename, + ename, + email, + username, + name, + firebaseUid: userRecord.uid, + }; +} + +/** + * Create multiple test users (with caching support) + * Users are created in parallel batches for better performance + */ +export async function createTestUsers(count: number, useCache: boolean = true): Promise { + // Check cache first if enabled + if (useCache) { + const { getCachedUsers, isCacheValid } = await import('./user-cache'); + if (isCacheValid(count)) { + const cached = getCachedUsers(count); + if (cached) { + console.log(`Using ${cached.length} cached test users`); + return cached; + } + } + } + + initializeFirebase(); + + console.log(`Creating ${count} test users in parallel batches...`); + + // Create users in parallel batches to speed up creation + const BATCH_SIZE = 5; // Create 5 users at a time + const users: TestUser[] = []; + const batches: number[][] = []; + + for (let i = 1; i <= count; i += BATCH_SIZE) { + batches.push( + Array.from({ length: Math.min(BATCH_SIZE, count - i + 1) }, (_, idx) => i + idx) + ); + } + + for (let batchIndex = 0; batchIndex < batches.length; batchIndex++) { + const batch = batches[batchIndex]; + console.log(`Creating batch ${batchIndex + 1}/${batches.length} (${batch.length} users)...`); + + const batchPromises = batch.map(async (index) => { + try { + return await createTestUser(index); + } catch (error: any) { + // If email already exists, try again with a new UUID + if (error.code === 'auth/email-already-exists') { + console.warn(`Email conflict for user ${index}, retrying...`); + return await createTestUser(index); + } else { + console.error(`Failed to create user ${index}:`, error); + throw error; + } + } + }); + + const batchUsers = await Promise.all(batchPromises); + users.push(...batchUsers); + + // Small delay between batches to avoid overwhelming the system + if (batchIndex < batches.length - 1) { + await new Promise(resolve => setTimeout(resolve, 200)); + } + } + + // Save to cache if enabled + if (useCache) { + const { saveCachedUsers } = await import('./user-cache'); + saveCachedUsers(users); + } + + return users; +} + +/** + * Cleanup test users + * NOTE: We don't delete users as deletion is not supported for sync. + * This function is kept for API compatibility but does nothing. 
+ */ +export async function cleanupTestUsers(users: TestUser[]): Promise { + // No-op: Deletion is not supported for sync, so we don't clean up test users + console.log(`Note: Skipping cleanup of ${users.length} test users (deletion not supported for sync)`); +} + +/** + * Wait for user to sync to pictique + */ +export async function waitForUserSync(ename: string, maxWaitMs: number = 60000): Promise { + // eslint-disable-next-line @typescript-eslint/no-require-imports + const axios = require('axios'); + const startTime = Date.now(); + + while (Date.now() - startTime < maxWaitMs) { + try { + // Try to find user in pictique by ename + const response = await axios.get( + `${config.pictiqueBaseUri}/api/users/search/ename-name`, + { + params: { q: ename }, + } + ); + + if (response.data && response.data.length > 0) { + const user = response.data.find((u: any) => u.ename === ename); + if (user) { + return true; + } + } + } catch (error) { + // User might not exist yet, continue waiting + } + + await new Promise(resolve => setTimeout(resolve, 2000)); // Check every 2 seconds + } + + return false; +} + diff --git a/tests/tsconfig.json b/tests/tsconfig.json new file mode 100644 index 00000000..697decfd --- /dev/null +++ b/tests/tsconfig.json @@ -0,0 +1,31 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "commonjs", + "lib": [ + "ES2020" + ], + "outDir": "./dist", + "rootDir": "./src", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "moduleResolution": "node", + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "types": [ + "node", + "vitest/globals" + ] + }, + "include": [ + "src/**/*" + ], + "exclude": [ + "node_modules", + "dist" + ] +} \ No newline at end of file diff --git a/tests/vitest.config.ts b/tests/vitest.config.ts new file mode 100644 index 00000000..cfe89c4e --- /dev/null +++ b/tests/vitest.config.ts @@ -0,0 +1,42 @@ +import { defineConfig } from 'vitest/config'; +import path from 'path'; + +export default defineConfig({ + test: { + glob: ['**/*.{test,spec}.{ts,tsx}'], + exclude: ['**/node_modules/**', '**/dist/**'], + environment: 'node', + globals: true, // Enable global test APIs (describe, test, expect, etc.) + testTimeout: 600000, // 10 minutes for load tests + hookTimeout: 300000, // 5 minutes for hooks (beforeAll, afterAll) + // Run tests sequentially to avoid overwhelming the system + pool: 'forks', + poolOptions: { + forks: { + singleFork: true, // Run all tests in a single fork + }, + }, + // Better reporting for long-running tests + logHeapUsage: false, + silent: false, + reporters: ['verbose', 'dot'], + // Coverage configuration + coverage: { + provider: 'v8', + reporter: ['text', 'json', 'html'], + exclude: [ + 'node_modules/', + 'dist/', + '**/*.d.ts', + '**/*.test.ts', + '**/*.spec.ts', + ], + }, + }, + resolve: { + alias: { + '@': path.resolve(__dirname, './src'), + }, + }, +}); +
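
For orientation, a minimal usage sketch (not part of the diff) of the Pictique `api-client` helpers introduced above. `getAuthToken`, `createPost`, and `getUserChats` are the functions defined in `tests/src/utils/api-client.ts`; the import path, the `@example-user` ename, and the post text are placeholders.

```ts
// Hypothetical smoke test exercising the api-client helpers end to end.
import { getAuthToken, createPost, getUserChats } from './src/utils/api-client';

async function smokeTest(): Promise<void> {
    // getAuthToken runs the full offer -> session -> SSE -> POST flow and
    // caches the resulting JWT per ename, so repeated calls are cheap.
    const token = await getAuthToken('@example-user');

    // Create a post as that user; the sync layer is expected to mirror it to Blabsy.
    const post = await createPost({ text: 'hello from the staging load test' }, token);
    console.log('created post', post?.id);

    // List the chats visible to this user on Pictique.
    const chats = await getUserChats(token);
    console.log(`user participates in ${chats.length} chats`);
}

smokeTest().catch(console.error);
```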
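
The `retryApiCall` helper is intended for eventually-consistent checks like the ones above. A hedged sketch, assuming the signatures from `tests/src/utils/api-client.ts`; the chat id, expected name, and ename are made up.

```ts
// Hypothetical: poll a Pictique chat until its name has synced, using retryApiCall.
import { retryApiCall, getAuthToken, getChat } from './src/utils/api-client';

async function waitForChatName(chatId: string, expectedName: string): Promise<void> {
    const token = await getAuthToken('@example-user');

    // Retry up to 5 times; retryApiCall backs off linearly (delayMs * attempt),
    // i.e. 1s, 2s, 3s, ... between attempts with delayMs = 1000.
    await retryApiCall(async () => {
        const chat = await getChat(chatId, token);
        if (!chat || chat.name !== expectedName) {
            throw new Error(`chat ${chatId} has not synced its name yet`);
        }
    }, 5, 1000);
}
```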
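
Both the message tests and `data-comparator.ts` identify "the same chat" across platforms by its participant set rather than by id, since ids differ between Pictique and Blabsy. A standalone sketch of that matching rule, with simplified placeholder types:

```ts
// Hypothetical helpers illustrating participant-set matching across platforms.
type AnyChat = {
    id: string;
    participants?: Array<string | { ename?: string; id?: string }>;
};

// Normalize every participant to an '@'-prefixed ename and sort for comparison.
function normalizeParticipants(chat: AnyChat): string[] {
    return (chat.participants ?? [])
        .map(p => (typeof p === 'string' ? p : p.ename ?? p.id ?? ''))
        .map(e => (e.startsWith('@') ? e : `@${e}`))
        .sort();
}

// Two chats are "the same" when their normalized participant lists match exactly.
function sameChat(a: AnyChat, b: AnyChat): boolean {
    const pa = normalizeParticipants(a);
    const pb = normalizeParticipants(b);
    return pa.length === pb.length && pa.every((ename, i) => ename === pb[i]);
}

// Example: map a Pictique chat to its Blabsy counterpart's id, if one exists.
function findCounterpart(pictiqueChat: AnyChat, blabsyChats: AnyChat[]): string | undefined {
    return blabsyChats.find(c => sameChat(pictiqueChat, c))?.id;
}
```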
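
Finally, a sketch of how the Firestore helpers and `sync-verifier` from this diff could be combined to assert Blabsy → Pictique profile sync. The ename and bio value are placeholders; the field mapping (bio → description, username → handle) follows `verifyUserSyncBlabsyToPictique` above.

```ts
// Hypothetical: update a profile on the Blabsy (Firestore) side, then verify
// the change is visible on Pictique after the sync buffer has elapsed.
import { updateUserData } from './src/utils/firebase-client';
import { verifyUserSyncBlabsyToPictique } from './src/utils/sync-verifier';

async function checkProfileSync(ename: string): Promise<void> {
    // Write on the Blabsy side; the web3 adapter is expected to propagate it.
    await updateUserData(ename, { bio: 'updated from the staging load test' });

    // verifyUserSyncBlabsyToPictique waits for the configured sync buffer
    // internally, looks the user up by ename, and compares the mapped fields.
    const result = await verifyUserSyncBlabsyToPictique(ename, {
        bio: 'updated from the staging load test',
    });

    if (!result.success) {
        throw new Error(`profile did not sync:\n${result.errors.join('\n')}`);
    }
}
```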