diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml index 61c2ebdd..c2e18381 100644 --- a/.github/workflows/deploy-dev.yml +++ b/.github/workflows/deploy-dev.yml @@ -1,181 +1,181 @@ name: Build, Tag & Deploy to Dev on: - push: - branches: - - dev + push: + branches: + - dev jobs: - build-and-deploy: - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Get latest tag from Docker Hub - id: get_tag - run: | - # Fetch tags from Docker Hub - REPO="${{ secrets.DOCKER_USERNAME }}/api-yapper-backend" - - echo "🔍 Fetching tags from Docker Hub for $REPO..." - - # Get Docker Hub token for authenticated API access - TOKEN=$(curl -s -H "Content-Type: application/json" -X POST \ - -d '{"username": "${{ secrets.DOCKER_USERNAME }}", "password": "${{ secrets.DOCKER_PASSWORD }}"}' \ - https://hub.docker.com/v2/users/login/ | jq -r .token) - - if [ "$TOKEN" = "null" ] || [ -z "$TOKEN" ]; then - echo "❌ Failed to authenticate with Docker Hub" - exit 1 - fi - - # Get all tags from Docker Hub API - TAGS=$(curl -s -H "Authorization: JWT ${TOKEN}" \ - "https://hub.docker.com/v2/repositories/$REPO/tags/?page_size=100" | \ - jq -r '.results[].name' | \ - grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | \ - sort -V | \ - tail -1) - - # If no tags found, start with v0.0.0 - if [ -z "$TAGS" ]; then - LATEST_TAG="v0.0.0" - echo "âš ī¸ No version tags found on Docker Hub, starting from v0.0.0" - else - LATEST_TAG="$TAGS" - echo "đŸ“Ļ Latest tag found on Docker Hub: $LATEST_TAG" - fi - - echo "latest_tag=$LATEST_TAG" >> $GITHUB_OUTPUT - - # Parse version components - VERSION=${LATEST_TAG#v} - IFS='.' read -r MAJOR MINOR PATCH <<< "$VERSION" - - # Increment patch version - PATCH=$((PATCH + 1)) - NEW_TAG="v${MAJOR}.${MINOR}.${PATCH}" - - echo "new_tag=$NEW_TAG" >> $GITHUB_OUTPUT - echo "🚀 New version: $NEW_TAG" - - - name: Log in to Docker Hub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - - - name: Build and push Docker image - uses: docker/build-push-action@v6 - with: - context: . - file: ./docker/Dockerfile - - push: true - tags: | - ${{ secrets.DOCKER_USERNAME }}/api-yapper-backend:${{ steps.get_tag.outputs.new_tag }} - ${{ secrets.DOCKER_USERNAME }}/api-yapper-backend:dev - - - name: Confirm image push - run: | - echo "✅ Image pushed: ${{ secrets.DOCKER_USERNAME }}/api-yapper-backend:${{ steps.get_tag.outputs.new_tag }}" - echo "✅ Image tagged: ${{ secrets.DOCKER_USERNAME }}/api-yapper-backend:dev" - - - name: SSH into Dev VM and deploy - uses: appleboy/ssh-action@v1.0.0 - env: - IMG_TAG: ${{ steps.get_tag.outputs.new_tag }} - with: - host: ${{ secrets.DEV_SERVER_HOST }} - username: ${{ secrets.DEV_SERVER_USER }} - key: ${{ secrets.DEV_SERVER_SSH_KEY }} - envs: IMG_TAG - script: | - set -e - cd ~/yapper # path to your app on VM - - echo "🧭 Getting current running image tag..." - CURRENT_TAG=$(IMG_TAG="" docker compose ps -q api | xargs docker inspect -f '{{ .Config.Image }}' | cut -d':' -f2 || echo "unknown") - echo "CURRENT_TAG: $CURRENT_TAG" - echo "đŸŗ Setting new image tag for deployment..." - echo "Deploying version: $IMG_TAG" - - echo "🔄 Pulling new image and restarting app container..." - IMG_TAG=$IMG_TAG docker compose pull api api-local api-test - IMG_TAG=$IMG_TAG docker compose up -d api api-local api-test - - echo "âŗ Waiting for health check..." - sleep 10 - - HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" ${{ secrets.DEV_HEALTHCHECK_URL }}) - - if [ "$HTTP_CODE" = "200" ]; then - echo "✅ Dev deployment successful for version $IMG_TAG" - - echo "🔄 Running migrations for all services..." - MIGRATION_FAILED=0 - - echo "đŸ“Ļ Running migration for api..." - docker compose exec -T api npm run migration:run:prod 2>&1 | tee /tmp/api-migration.log - if grep -q "Error during migration" /tmp/api-migration.log || grep -q "Cannot find module" /tmp/api-migration.log; then - echo "❌ Migration for api failed!" - cat /tmp/api-migration.log - MIGRATION_FAILED=1 - elif grep -q "No migrations are pending" /tmp/api-migration.log || grep -q "No pending migrations" /tmp/api-migration.log; then - echo "â„šī¸ No pending migrations for api" - else - echo "✅ Migration for api completed successfully" - fi - - echo "đŸ“Ļ Running migration for api-local..." - docker compose exec -T api-local npm run migration:run:prod 2>&1 | tee /tmp/api-local-migration.log - if grep -q "Error during migration" /tmp/api-local-migration.log || grep -q "Cannot find module" /tmp/api-local-migration.log; then - echo "❌ Migration for api-local failed!" - cat /tmp/api-local-migration.log - MIGRATION_FAILED=1 - elif grep -q "No migrations are pending" /tmp/api-local-migration.log || grep -q "No pending migrations" /tmp/api-local-migration.log; then - echo "â„šī¸ No pending migrations for api-local" - else - echo "✅ Migration for api-local completed successfully" - fi - - echo "đŸ“Ļ Running migration for api-test..." - docker compose exec -T api-test npm run migration:run:prod 2>&1 | tee /tmp/api-test-migration.log - if grep -q "Error during migration" /tmp/api-test-migration.log || grep -q "Cannot find module" /tmp/api-test-migration.log; then - echo "❌ Migration for api-test failed!" - cat /tmp/api-test-migration.log - MIGRATION_FAILED=1 - elif grep -q "No migrations are pending" /tmp/api-test-migration.log || grep -q "No pending migrations" /tmp/api-test-migration.log; then - echo "â„šī¸ No pending migrations for api-test" - else - echo "✅ Migration for api-test completed successfully" - fi - - if [ "$MIGRATION_FAILED" = "1" ]; then - echo "❌ One or more migrations failed! Rolling back..." - if [ "$CURRENT_TAG" != "unknown" ]; then - IMG_TAG=$CURRENT_TAG docker compose up -d api api-local api-test - echo "🔙 Rolled back to $CURRENT_TAG" - fi - exit 1 - fi - - echo "✅ All migrations completed successfully" - else - echo "❌ Health check failed (HTTP $HTTP_CODE)! Rolling back app container..." - if [ "$CURRENT_TAG" != "unknown" ]; then - IMG_TAG=$CURRENT_TAG docker compose up -d api api-local api-test - echo "🔙 Rolled back to $CURRENT_TAG" - fi - exit 1 - fi - - - name: Deployment summary - if: success() - run: | - echo "🎉 Deployment Complete!" - echo "Version: ${{ steps.get_tag.outputs.new_tag }}" - echo "Branch: dev" - echo "Commit: ${{ github.sha }}" + build-and-deploy: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Get latest tag from Docker Hub + id: get_tag + run: | + # Fetch tags from Docker Hub + REPO="${{ secrets.DOCKER_USERNAME }}/api-yapper-backend" + + echo "🔍 Fetching tags from Docker Hub for $REPO..." + + # Get Docker Hub token for authenticated API access + TOKEN=$(curl -s -H "Content-Type: application/json" -X POST \ + -d '{"username": "${{ secrets.DOCKER_USERNAME }}", "password": "${{ secrets.DOCKER_PASSWORD }}"}' \ + https://hub.docker.com/v2/users/login/ | jq -r .token) + + if [ "$TOKEN" = "null" ] || [ -z "$TOKEN" ]; then + echo "❌ Failed to authenticate with Docker Hub" + exit 1 + fi + + # Get all tags from Docker Hub API + TAGS=$(curl -s -H "Authorization: JWT ${TOKEN}" \ + "https://hub.docker.com/v2/repositories/$REPO/tags/?page_size=100" | \ + jq -r '.results[].name' | \ + grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | \ + sort -V | \ + tail -1) + + # If no tags found, start with v0.0.0 + if [ -z "$TAGS" ]; then + LATEST_TAG="v0.0.0" + echo "âš ī¸ No version tags found on Docker Hub, starting from v0.0.0" + else + LATEST_TAG="$TAGS" + echo "đŸ“Ļ Latest tag found on Docker Hub: $LATEST_TAG" + fi + + echo "latest_tag=$LATEST_TAG" >> $GITHUB_OUTPUT + + # Parse version components + VERSION=${LATEST_TAG#v} + IFS='.' read -r MAJOR MINOR PATCH <<< "$VERSION" + + # Increment patch version + PATCH=$((PATCH + 1)) + NEW_TAG="v${MAJOR}.${MINOR}.${PATCH}" + + echo "new_tag=$NEW_TAG" >> $GITHUB_OUTPUT + echo "🚀 New version: $NEW_TAG" + + - name: Log in to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push Docker image + uses: docker/build-push-action@v6 + with: + context: . + file: ./docker/Dockerfile + + push: true + tags: | + ${{ secrets.DOCKER_USERNAME }}/api-yapper-backend:${{ steps.get_tag.outputs.new_tag }} + ${{ secrets.DOCKER_USERNAME }}/api-yapper-backend:dev + + - name: Confirm image push + run: | + echo "✅ Image pushed: ${{ secrets.DOCKER_USERNAME }}/api-yapper-backend:${{ steps.get_tag.outputs.new_tag }}" + echo "✅ Image tagged: ${{ secrets.DOCKER_USERNAME }}/api-yapper-backend:dev" + + - name: SSH into Dev VM and deploy + uses: appleboy/ssh-action@v1.0.0 + env: + IMG_TAG: ${{ steps.get_tag.outputs.new_tag }} + with: + host: ${{ secrets.DEV_SERVER_HOST }} + username: ${{ secrets.DEV_SERVER_USER }} + key: ${{ secrets.DEV_SERVER_SSH_KEY }} + envs: IMG_TAG + script: | + set -e + cd ~/yapper # path to your app on VM + + echo "🧭 Getting current running image tag..." + CURRENT_TAG=$(IMG_TAG="" docker compose ps -q api | xargs docker inspect -f '{{ .Config.Image }}' | cut -d':' -f2 || echo "unknown") + echo "CURRENT_TAG: $CURRENT_TAG" + echo "đŸŗ Setting new image tag for deployment..." + echo "Deploying version: $IMG_TAG" + + echo "🔄 Pulling new image and restarting app container..." + IMG_TAG=$IMG_TAG docker compose pull api api-local api-test + IMG_TAG=$IMG_TAG docker compose up -d api api-local api-test + + echo "âŗ Waiting for health check..." + sleep 10 + + HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" ${{ secrets.DEV_HEALTHCHECK_URL }}) + + # if [ "$HTTP_CODE" = "200" ]; then + # echo "✅ Dev deployment successful for version $IMG_TAG" + + # echo "🔄 Running migrations for all services..." + # MIGRATION_FAILED=0 + + # echo "đŸ“Ļ Running migration for api..." + # docker compose exec -T api npm run migration:run:prod 2>&1 | tee /tmp/api-migration.log + # if grep -q "Error during migration" /tmp/api-migration.log || grep -q "Cannot find module" /tmp/api-migration.log; then + # echo "❌ Migration for api failed!" + # cat /tmp/api-migration.log + # MIGRATION_FAILED=1 + # elif grep -q "No migrations are pending" /tmp/api-migration.log || grep -q "No pending migrations" /tmp/api-migration.log; then + # echo "â„šī¸ No pending migrations for api" + # else + # echo "✅ Migration for api completed successfully" + # fi + + # echo "đŸ“Ļ Running migration for api-local..." + # docker compose exec -T api-local npm run migration:run:prod 2>&1 | tee /tmp/api-local-migration.log + # if grep -q "Error during migration" /tmp/api-local-migration.log || grep -q "Cannot find module" /tmp/api-local-migration.log; then + # echo "❌ Migration for api-local failed!" + # cat /tmp/api-local-migration.log + # MIGRATION_FAILED=1 + # elif grep -q "No migrations are pending" /tmp/api-local-migration.log || grep -q "No pending migrations" /tmp/api-local-migration.log; then + # echo "â„šī¸ No pending migrations for api-local" + # else + # echo "✅ Migration for api-local completed successfully" + # fi + + # echo "đŸ“Ļ Running migration for api-test..." + # docker compose exec -T api-test npm run migration:run:prod 2>&1 | tee /tmp/api-test-migration.log + # if grep -q "Error during migration" /tmp/api-test-migration.log || grep -q "Cannot find module" /tmp/api-test-migration.log; then + # echo "❌ Migration for api-test failed!" + # cat /tmp/api-test-migration.log + # MIGRATION_FAILED=1 + # elif grep -q "No migrations are pending" /tmp/api-test-migration.log || grep -q "No pending migrations" /tmp/api-test-migration.log; then + # echo "â„šī¸ No pending migrations for api-test" + # else + # echo "✅ Migration for api-test completed successfully" + # fi + + # if [ "$MIGRATION_FAILED" = "1" ]; then + # echo "❌ One or more migrations failed! Rolling back..." + # if [ "$CURRENT_TAG" != "unknown" ]; then + # IMG_TAG=$CURRENT_TAG docker compose up -d api api-local api-test + # echo "🔙 Rolled back to $CURRENT_TAG" + # fi + # exit 1 + # fi + + # echo "✅ All migrations completed successfully" + # else + # echo "❌ Health check failed (HTTP $HTTP_CODE)! Rolling back app container..." + # if [ "$CURRENT_TAG" != "unknown" ]; then + # IMG_TAG=$CURRENT_TAG docker compose up -d api api-local api-test + # echo "🔙 Rolled back to $CURRENT_TAG" + # fi + # exit 1 + # fi + + - name: Deployment summary + if: success() + run: | + echo "🎉 Deployment Complete!" + echo "Version: ${{ steps.get_tag.outputs.new_tag }}" + echo "Branch: dev" + echo "Commit: ${{ github.sha }}" diff --git a/.scannerwork/.sonar_lock b/.scannerwork/.sonar_lock new file mode 100644 index 00000000..e69de29b diff --git a/.scannerwork/report-task.txt b/.scannerwork/report-task.txt new file mode 100644 index 00000000..d1276f07 --- /dev/null +++ b/.scannerwork/report-task.txt @@ -0,0 +1,6 @@ +projectKey=x-backend-replica +serverUrl=http://localhost:9000 +serverVersion=25.12.0.117093 +dashboardUrl=http://localhost:9000/dashboard?id=x-backend-replica +ceTaskId=7168e6a5-41aa-42a6-a2a2-124c7e9216b7 +ceTaskUrl=http://localhost:9000/api/ce/task?id=7168e6a5-41aa-42a6-a2a2-124c7e9216b7 diff --git a/dump.rdb b/dump.rdb index 4a63254c..102129bf 100644 Binary files a/dump.rdb and b/dump.rdb differ diff --git a/package-lock.json b/package-lock.json index 750d5f47..5725d2b0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -40,7 +40,7 @@ "class-transformer": "^0.5.1", "class-validator": "^0.14.2", "cookie-parser": "^1.4.7", - "firebase-admin": "^13.6.0", + "expo-server-sdk": "^4.0.0", "fluent-ffmpeg": "^2.1.3", "google-auth-library": "^10.4.1", "groq-sdk": "^0.37.0", @@ -65,6 +65,7 @@ "socket.io": "^4.8.1", "swagger-ui-express": "^5.0.1", "tunnel-ssh": "^5.2.0", + "twitter-text": "^3.1.0", "typeorm": "^0.3.26", "xlsx": "^0.18.5" }, @@ -85,6 +86,7 @@ "@types/passport-github2": "^1.2.9", "@types/supertest": "^6.0.2", "@types/tunnel-ssh": "^5.0.4", + "@types/twitter-text": "^3.1.10", "eslint": "^9.18.0", "eslint-config-prettier": "^10.0.1", "eslint-plugin-prettier": "^5.2.2", @@ -93,6 +95,7 @@ "jest": "^30.0.0", "lint-staged": "^16.2.4", "prettier": "^3.4.2", + "sonarqube-scanner": "^4.3.2", "source-map-support": "^0.5.21", "supertest": "^7.0.0", "ts-jest": "^29.2.5", @@ -435,19 +438,19 @@ } }, "node_modules/@aws-sdk/client-sesv2": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sesv2/-/client-sesv2-3.947.0.tgz", - "integrity": "sha512-XttaaNh2rPf0PrGShFIGh56QyNstKfQ9ozAfE+TGsYsMNok3yxbdZnFub8PrI1boYceJolhK2m6VBk3J5nDkAg==", + "version": "3.952.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sesv2/-/client-sesv2-3.952.0.tgz", + "integrity": "sha512-0avirspZ7/RkHqp9It12xx6UJ2rkO6B6EeNScIgDkgyELl4tGsmF8bhBSPDqeJMZ1HQGYglanzkDRrYFgTN6iA==", "dev": true, "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-node": "3.947.0", + "@aws-sdk/credential-provider-node": "3.952.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.936.0", + "@aws-sdk/middleware-recursion-detection": "3.948.0", "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/signature-v4-multi-region": "3.947.0", @@ -487,9 +490,9 @@ } }, "node_modules/@aws-sdk/client-sso": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.947.0.tgz", - "integrity": "sha512-sDwcO8SP290WSErY1S8pz8hTafeghKmmWjNVks86jDK30wx62CfazOTeU70IpWgrUBEygyXk/zPogHsUMbW2Rg==", + "version": "3.948.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.948.0.tgz", + "integrity": "sha512-iWjchXy8bIAVBUsKnbfKYXRwhLgRg3EqCQ5FTr3JbR+QR75rZm4ZOYXlvHGztVTmtAZ+PQVA1Y4zO7v7N87C0A==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -498,7 +501,7 @@ "@aws-sdk/core": "3.947.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.936.0", + "@aws-sdk/middleware-recursion-detection": "3.948.0", "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", @@ -601,20 +604,20 @@ } }, "node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.947.0.tgz", - "integrity": "sha512-A2ZUgJUJZERjSzvCi2NR/hBVbVkTXPD0SdKcR/aITb30XwF+n3T963b+pJl90qhOspoy7h0IVYNR7u5Nr9tJdQ==", + "version": "3.952.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.952.0.tgz", + "integrity": "sha512-N5B15SwzMkZ8/LLopNksTlPEWWZn5tbafZAUfMY5Xde4rSHGWmv5H/ws2M3P8L0X77E2wKnOJsNmu+GsArBreQ==", "dev": true, "license": "Apache-2.0", "dependencies": { "@aws-sdk/core": "3.947.0", "@aws-sdk/credential-provider-env": "3.947.0", "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-login": "3.947.0", + "@aws-sdk/credential-provider-login": "3.952.0", "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.947.0", - "@aws-sdk/credential-provider-web-identity": "3.947.0", - "@aws-sdk/nested-clients": "3.947.0", + "@aws-sdk/credential-provider-sso": "3.952.0", + "@aws-sdk/credential-provider-web-identity": "3.952.0", + "@aws-sdk/nested-clients": "3.952.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", @@ -627,14 +630,14 @@ } }, "node_modules/@aws-sdk/credential-provider-login": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.947.0.tgz", - "integrity": "sha512-u7M3hazcB7aJiVwosNdJRbIJDzbwQ861NTtl6S0HmvWpixaVb7iyhJZWg8/plyUznboZGBm7JVEdxtxv3u0bTA==", + "version": "3.952.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.952.0.tgz", + "integrity": "sha512-jL9zc+e+7sZeJrHzYKK9GOjl1Ktinh0ORU3cM2uRBi7fuH/0zV9pdMN8PQnGXz0i4tJaKcZ1lrE4V0V6LB9NQg==", "dev": true, "license": "Apache-2.0", "dependencies": { "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.947.0", + "@aws-sdk/nested-clients": "3.952.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/protocol-http": "^5.3.5", @@ -647,18 +650,18 @@ } }, "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.947.0.tgz", - "integrity": "sha512-S0Zqebr71KyrT6J4uYPhwV65g4V5uDPHnd7dt2W34FcyPu+hVC7Hx4MFmsPyVLeT5cMCkkZvmY3kAoEzgUPJJg==", + "version": "3.952.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.952.0.tgz", + "integrity": "sha512-pj7nidLrb3Dz9llcUPh6N0Yv1dBYTS9xJqi8u0kI8D5sn72HJMB+fIOhcDQVXXAw/dpVolOAH9FOAbog5JDAMg==", "dev": true, "license": "Apache-2.0", "dependencies": { "@aws-sdk/credential-provider-env": "3.947.0", "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-ini": "3.947.0", + "@aws-sdk/credential-provider-ini": "3.952.0", "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.947.0", - "@aws-sdk/credential-provider-web-identity": "3.947.0", + "@aws-sdk/credential-provider-sso": "3.952.0", + "@aws-sdk/credential-provider-web-identity": "3.952.0", "@aws-sdk/types": "3.936.0", "@smithy/credential-provider-imds": "^4.2.5", "@smithy/property-provider": "^4.2.5", @@ -689,15 +692,15 @@ } }, "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.947.0.tgz", - "integrity": "sha512-NktnVHTGaUMaozxycYrepvb3yfFquHTQ53lt6hBEVjYBzK3C4tVz0siUpr+5RMGLSiZ5bLBp2UjJPgwx4i4waQ==", + "version": "3.952.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.952.0.tgz", + "integrity": "sha512-1CQdP5RzxeXuEfytbAD5TgreY1c9OacjtCdO8+n9m05tpzBABoNBof0hcjzw1dtrWFH7deyUgfwCl1TAN3yBWQ==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@aws-sdk/client-sso": "3.947.0", + "@aws-sdk/client-sso": "3.948.0", "@aws-sdk/core": "3.947.0", - "@aws-sdk/token-providers": "3.947.0", + "@aws-sdk/token-providers": "3.952.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", @@ -709,14 +712,14 @@ } }, "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.947.0.tgz", - "integrity": "sha512-gokm/e/YHiHLrZgLq4j8tNAn8RJDPbIcglFRKgy08q8DmAqHQ8MXAKW3eS0QjAuRXU9mcMmUo1NrX6FRNBCCPw==", + "version": "3.952.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.952.0.tgz", + "integrity": "sha512-5hJbfaZdHDAP8JlwplNbXJAat9Vv7L0AbTZzkbPIgjHhC3vrMf5r3a6I1HWFp5i5pXo7J45xyuf5uQGZJxJlCg==", "dev": true, "license": "Apache-2.0", "dependencies": { "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.947.0", + "@aws-sdk/nested-clients": "3.952.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", @@ -759,14 +762,14 @@ } }, "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.936.0.tgz", - "integrity": "sha512-l4aGbHpXM45YNgXggIux1HgsCVAvvBoqHPkqLnqMl9QVapfuSTjJHfDYDsx1Xxct6/m7qSMUzanBALhiaGO2fA==", + "version": "3.948.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.948.0.tgz", + "integrity": "sha512-Qa8Zj+EAqA0VlAVvxpRnpBpIWJI9KUwaioY1vkeNVwXPlNaz9y9zCKVM9iU9OZ5HXpoUg6TnhATAHXHAE8+QsQ==", "dev": true, "license": "Apache-2.0", "dependencies": { "@aws-sdk/types": "3.936.0", - "@aws/lambda-invoke-store": "^0.2.0", + "@aws/lambda-invoke-store": "^0.2.2", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" @@ -821,9 +824,9 @@ } }, "node_modules/@aws-sdk/nested-clients": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.947.0.tgz", - "integrity": "sha512-DjRJEYNnHUTu9kGPPQDTSXquwSEd6myKR4ssI4FaYLFhdT3ldWpj73yYt807H3tdmhS7vPmdVqchSJnjurUQAw==", + "version": "3.952.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.952.0.tgz", + "integrity": "sha512-OtuirjxuOqZyDcI0q4WtoyWfkq3nSnbH41JwJQsXJefduWcww1FQe5TL1JfYCU7seUxHzK8rg2nFxUBuqUlZtg==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -832,7 +835,7 @@ "@aws-sdk/core": "3.947.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.936.0", + "@aws-sdk/middleware-recursion-detection": "3.948.0", "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", @@ -906,14 +909,14 @@ } }, "node_modules/@aws-sdk/token-providers": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.947.0.tgz", - "integrity": "sha512-X/DyB8GuK44rsE89Tn5+s542B3PhGbXQSgV8lvqHDzvicwCt0tWny6790st6CPETrVVV2K3oJMfG5U3/jAmaZA==", + "version": "3.952.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.952.0.tgz", + "integrity": "sha512-IpQVC9WOeXQlCEcFVNXWDIKy92CH1Az37u9K0H3DF/HT56AjhyDVKQQfHUy00nt7bHFe3u0K5+zlwErBeKy5ZA==", "dev": true, "license": "Apache-2.0", "dependencies": { "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.947.0", + "@aws-sdk/nested-clients": "3.952.0", "@aws-sdk/types": "3.936.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", @@ -1353,7 +1356,6 @@ "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.5", @@ -1788,7 +1790,6 @@ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", "license": "MIT", - "optional": true, "engines": { "node": ">=6.9.0" } @@ -2079,7 +2080,6 @@ "resolved": "https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-8.19.1.tgz", "integrity": "sha512-+1j9NnQVOX+lbWB8LhCM7IkUmjU05Y4+BmSLfusq0msCsQb1Va+OUKFCoOXjCJqQrcgdRdQCjYYyolQ/npQALQ==", "license": "Apache-2.0", - "peer": true, "dependencies": { "@elastic/transport": "^8.9.6", "apache-arrow": "18.x - 21.x", @@ -2262,9 +2262,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.39.1", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.1.tgz", - "integrity": "sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw==", + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.2.tgz", + "integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==", "dev": true, "license": "MIT", "engines": { @@ -2298,12 +2298,6 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, - "node_modules/@fastify/busboy": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.2.0.tgz", - "integrity": "sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA==", - "license": "MIT" - }, "node_modules/@ffmpeg-installer/darwin-arm64": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/@ffmpeg-installer/darwin-arm64/-/darwin-arm64-4.1.5.tgz", @@ -2430,107 +2424,6 @@ "win32" ] }, - "node_modules/@firebase/app-check-interop-types": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@firebase/app-check-interop-types/-/app-check-interop-types-0.3.3.tgz", - "integrity": "sha512-gAlxfPLT2j8bTI/qfe3ahl2I2YcBQ8cFIBdhAQA4I2f3TndcO+22YizyGYuttLHPQEpWkhmpFW60VCFEPg4g5A==", - "license": "Apache-2.0" - }, - "node_modules/@firebase/app-types": { - "version": "0.9.3", - "resolved": "https://registry.npmjs.org/@firebase/app-types/-/app-types-0.9.3.tgz", - "integrity": "sha512-kRVpIl4vVGJ4baogMDINbyrIOtOxqhkZQg4jTq3l8Lw6WSk0xfpEYzezFu+Kl4ve4fbPl79dvwRtaFqAC/ucCw==", - "license": "Apache-2.0" - }, - "node_modules/@firebase/auth-interop-types": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/@firebase/auth-interop-types/-/auth-interop-types-0.2.4.tgz", - "integrity": "sha512-JPgcXKCuO+CWqGDnigBtvo09HeBs5u/Ktc2GaFj2m01hLarbxthLNm7Fk8iOP1aqAtXV+fnnGj7U28xmk7IwVA==", - "license": "Apache-2.0" - }, - "node_modules/@firebase/component": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.7.0.tgz", - "integrity": "sha512-wR9En2A+WESUHexjmRHkqtaVH94WLNKt6rmeqZhSLBybg4Wyf0Umk04SZsS6sBq4102ZsDBFwoqMqJYj2IoDSg==", - "license": "Apache-2.0", - "dependencies": { - "@firebase/util": "1.13.0", - "tslib": "^2.1.0" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@firebase/database": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@firebase/database/-/database-1.1.0.tgz", - "integrity": "sha512-gM6MJFae3pTyNLoc9VcJNuaUDej0ctdjn3cVtILo3D5lpp0dmUHHLFN/pUKe7ImyeB1KAvRlEYxvIHNF04Filg==", - "license": "Apache-2.0", - "dependencies": { - "@firebase/app-check-interop-types": "0.3.3", - "@firebase/auth-interop-types": "0.2.4", - "@firebase/component": "0.7.0", - "@firebase/logger": "0.5.0", - "@firebase/util": "1.13.0", - "faye-websocket": "0.11.4", - "tslib": "^2.1.0" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@firebase/database-compat": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@firebase/database-compat/-/database-compat-2.1.0.tgz", - "integrity": "sha512-8nYc43RqxScsePVd1qe1xxvWNf0OBnbwHxmXJ7MHSuuTVYFO3eLyLW3PiCKJ9fHnmIz4p4LbieXwz+qtr9PZDg==", - "license": "Apache-2.0", - "dependencies": { - "@firebase/component": "0.7.0", - "@firebase/database": "1.1.0", - "@firebase/database-types": "1.0.16", - "@firebase/logger": "0.5.0", - "@firebase/util": "1.13.0", - "tslib": "^2.1.0" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@firebase/database-types": { - "version": "1.0.16", - "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-1.0.16.tgz", - "integrity": "sha512-xkQLQfU5De7+SPhEGAXFBnDryUWhhlFXelEg2YeZOQMCdoe7dL64DDAd77SQsR+6uoXIZY5MB4y/inCs4GTfcw==", - "license": "Apache-2.0", - "dependencies": { - "@firebase/app-types": "0.9.3", - "@firebase/util": "1.13.0" - } - }, - "node_modules/@firebase/logger": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.5.0.tgz", - "integrity": "sha512-cGskaAvkrnh42b3BA3doDWeBmuHFO/Mx5A83rbRDYakPjO9bJtRL3dX7javzc2Rr/JHZf4HlterTW2lUkfeN4g==", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.1.0" - }, - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/@firebase/util": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.13.0.tgz", - "integrity": "sha512-0AZUyYUfpMNcztR5l09izHwXkZpghLgCUaAGjtMwXnCg3bj4ml5VgiwqOMOxJ+Nw4qN/zJAaOQBcJ7KGkWStqQ==", - "hasInstallScript": true, - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.1.0" - }, - "engines": { - "node": ">=20.0.0" - } - }, "node_modules/@golevelup/nestjs-discovery": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/@golevelup/nestjs-discovery/-/nestjs-discovery-5.0.0.tgz", @@ -2544,227 +2437,6 @@ "@nestjs/core": "^11.0.20" } }, - "node_modules/@google-cloud/firestore": { - "version": "7.11.6", - "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.6.tgz", - "integrity": "sha512-EW/O8ktzwLfyWBOsNuhRoMi8lrC3clHM5LVFhGvO1HCsLozCOOXRAlHrYBoE6HL42Sc8yYMuCb2XqcnJ4OOEpw==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "@opentelemetry/api": "^1.3.0", - "fast-deep-equal": "^3.1.1", - "functional-red-black-tree": "^1.0.1", - "google-gax": "^4.3.3", - "protobufjs": "^7.2.6" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@google-cloud/paginator": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz", - "integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "arrify": "^2.0.0", - "extend": "^3.0.2" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@google-cloud/projectify": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz", - "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==", - "license": "Apache-2.0", - "optional": true, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@google-cloud/promisify": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz", - "integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==", - "license": "Apache-2.0", - "optional": true, - "engines": { - "node": ">=14" - } - }, - "node_modules/@google-cloud/storage": { - "version": "7.18.0", - "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.18.0.tgz", - "integrity": "sha512-r3ZwDMiz4nwW6R922Z1pwpePxyRwE5GdevYX63hRmAQUkUQJcBH/79EnQPDv5cOv1mFBgevdNWQfi3tie3dHrQ==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "@google-cloud/paginator": "^5.0.0", - "@google-cloud/projectify": "^4.0.0", - "@google-cloud/promisify": "<4.1.0", - "abort-controller": "^3.0.0", - "async-retry": "^1.3.3", - "duplexify": "^4.1.3", - "fast-xml-parser": "^4.4.1", - "gaxios": "^6.0.2", - "google-auth-library": "^9.6.3", - "html-entities": "^2.5.2", - "mime": "^3.0.0", - "p-limit": "^3.0.1", - "retry-request": "^7.0.0", - "teeny-request": "^9.0.0", - "uuid": "^8.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/@google-cloud/storage/node_modules/fast-xml-parser": { - "version": "4.5.3", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.3.tgz", - "integrity": "sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/NaturalIntelligence" - } - ], - "license": "MIT", - "optional": true, - "dependencies": { - "strnum": "^1.1.1" - }, - "bin": { - "fxparser": "src/cli/cli.js" - } - }, - "node_modules/@google-cloud/storage/node_modules/gcp-metadata": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.1.tgz", - "integrity": "sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "gaxios": "^6.1.1", - "google-logging-utils": "^0.0.2", - "json-bigint": "^1.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/@google-cloud/storage/node_modules/google-auth-library": { - "version": "9.15.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.1.tgz", - "integrity": "sha512-Jb6Z0+nvECVz+2lzSMt9u98UsoakXxA2HGHMCxh+so3n90XgYWkq5dur19JAJV7ONiJY22yBTyJB1TSkvPq9Ng==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "gaxios": "^6.1.1", - "gcp-metadata": "^6.1.0", - "gtoken": "^7.0.0", - "jws": "^4.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/@google-cloud/storage/node_modules/google-logging-utils": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-0.0.2.tgz", - "integrity": "sha512-NEgUnEcBiP5HrPzufUkBzJOD/Sxsco3rLNo1F1TNf7ieU8ryUzBhqba8r756CjLX7rn3fHl6iLEwPYuqpoKgQQ==", - "license": "Apache-2.0", - "optional": true, - "engines": { - "node": ">=14" - } - }, - "node_modules/@google-cloud/storage/node_modules/gtoken": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", - "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", - "license": "MIT", - "optional": true, - "dependencies": { - "gaxios": "^6.0.0", - "jws": "^4.0.0" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@google-cloud/storage/node_modules/strnum": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz", - "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/NaturalIntelligence" - } - ], - "license": "MIT", - "optional": true - }, - "node_modules/@grpc/grpc-js": { - "version": "1.14.2", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.14.2.tgz", - "integrity": "sha512-QzVUtEFyu05UNx2xr0fCQmStUO17uVQhGNowtxs00IgTZT6/W2PBLfUkj30s0FKJ29VtTa3ArVNIhNP6akQhqA==", - "license": "Apache-2.0", - "optional": true, - "peer": true, - "dependencies": { - "@grpc/proto-loader": "^0.8.0", - "@js-sdsl/ordered-map": "^4.4.2" - }, - "engines": { - "node": ">=12.10.0" - } - }, - "node_modules/@grpc/grpc-js/node_modules/@grpc/proto-loader": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.8.0.tgz", - "integrity": "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "lodash.camelcase": "^4.3.0", - "long": "^5.0.0", - "protobufjs": "^7.5.3", - "yargs": "^17.7.2" - }, - "bin": { - "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/@grpc/proto-loader": { - "version": "0.7.15", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.15.tgz", - "integrity": "sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ==", - "license": "Apache-2.0", - "optional": true, - "peer": true, - "dependencies": { - "lodash.camelcase": "^4.3.0", - "long": "^5.0.0", - "protobufjs": "^7.2.5", - "yargs": "^17.7.2" - }, - "bin": { - "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/@humanfs/core": { "version": "0.19.1", "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", @@ -4248,17 +3920,6 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, - "node_modules/@js-sdsl/ordered-map": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", - "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", - "license": "MIT", - "optional": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/js-sdsl" - } - }, "node_modules/@lukeed/csprng": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@lukeed/csprng/-/csprng-1.1.0.tgz", @@ -4275,9 +3936,9 @@ "license": "MIT" }, "node_modules/@mongodb-js/saslprep": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.3.2.tgz", - "integrity": "sha512-QgA5AySqB27cGTXBFmnpifAi7HxoGUeezwo6p9dI03MuDB6Pp33zgclqVb6oVK3j6I9Vesg0+oojW2XxB59SGg==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.4.0.tgz", + "integrity": "sha512-ZHzx7Z3rdlWL1mECydvpryWN/ETXJiCxdgQKTAH+djzIPe77HdnSizKBDi1TVDXZjXyOj2IqEG/vPw71ULF06w==", "license": "MIT", "dependencies": { "sparse-bitfield": "^3.0.3" @@ -4509,6 +4170,7 @@ ], "license": "MIT", "optional": true, + "peer": true, "bin": { "uuid": "dist/bin/uuid" } @@ -4722,7 +4384,6 @@ "resolved": "https://registry.npmjs.org/@nestjs/common/-/common-11.1.9.tgz", "integrity": "sha512-zDntUTReRbAThIfSp3dQZ9kKqI+LjgLp5YZN5c1bgNRDuoeLySAoZg46Bg1a+uV8TMgIRziHocglKGNzr6l+bQ==", "license": "MIT", - "peer": true, "dependencies": { "file-type": "21.1.0", "iterare": "1.2.1", @@ -4770,7 +4431,6 @@ "integrity": "sha512-a00B0BM4X+9z+t3UxJqIZlemIwCQdYoPKrMcM+ky4z3pkqqG1eTWexjs+YXpGObnLnjtMPVKWlcZHp3adDYvUw==", "hasInstallScript": true, "license": "MIT", - "peer": true, "dependencies": { "@nuxt/opencollective": "0.4.1", "fast-safe-stringify": "2.1.1", @@ -4851,14 +4511,14 @@ } }, "node_modules/@nestjs/mongoose": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@nestjs/mongoose/-/mongoose-11.0.3.tgz", - "integrity": "sha512-tg7bbKD4MnNMPaiDLXK/JUyTNQxIn3rNnI+oYU1HorLpNiR2E8vPraWVvfptpIj+zferpT6LkrHMvtqvuIKNPw==", + "version": "11.0.4", + "resolved": "https://registry.npmjs.org/@nestjs/mongoose/-/mongoose-11.0.4.tgz", + "integrity": "sha512-LUOlUeSOfbjdIu22QwOmczv2CzJQr9LUBo2mOfbXrGCu2svpr5Hiu71zBFrb/9UC+H8BjGMKbBOq1nEbMF6ZJA==", "license": "MIT", "peerDependencies": { "@nestjs/common": "^10.0.0 || ^11.0.0", "@nestjs/core": "^10.0.0 || ^11.0.0", - "mongoose": "^7.0.0 || ^8.0.0", + "mongoose": "^7.0.0 || ^8.0.0 || ^9.0.0", "rxjs": "^7.0.0" } }, @@ -4877,7 +4537,6 @@ "resolved": "https://registry.npmjs.org/@nestjs/platform-express/-/platform-express-11.1.9.tgz", "integrity": "sha512-GVd3+0lO0mJq2m1kl9hDDnVrX3Nd4oH3oDfklz0pZEVEVS0KVSp63ufHq2Lu9cyPdSBuelJr9iPm2QQ1yX+Kmw==", "license": "MIT", - "peer": true, "dependencies": { "cors": "2.8.5", "express": "5.1.0", @@ -4950,7 +4609,6 @@ "resolved": "https://registry.npmjs.org/@nestjs/platform-socket.io/-/platform-socket.io-11.1.9.tgz", "integrity": "sha512-OaAW+voXo5BXbFKd9Ot3SL05tEucRMhZRdw5wdWZf/RpIl9hB6G6OHr8DDxNbUGvuQWzNnZHCDHx3EQJzjcIyA==", "license": "MIT", - "peer": true, "dependencies": { "socket.io": "4.8.1", "tslib": "2.8.1" @@ -5155,7 +4813,6 @@ "resolved": "https://registry.npmjs.org/@nestjs/websockets/-/websockets-11.1.9.tgz", "integrity": "sha512-kkkdeTVcc3X7ZzvVqUVpOAJoh49kTRUjWNUXo5jmG+27OvZoHfs/vuSiqxidrrbIgydSqN15HUsf1wZwQUrxCQ==", "license": "MIT", - "peer": true, "dependencies": { "iterare": "1.2.1", "object-hash": "3.0.0", @@ -5215,7 +4872,6 @@ "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", "license": "Apache-2.0", - "peer": true, "engines": { "node": ">=8.0.0" } @@ -5277,92 +4933,18 @@ "url": "https://opencollective.com/pkgr" } }, - "node_modules/@protobufjs/aspromise": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", - "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", - "license": "BSD-3-Clause", - "optional": true - }, - "node_modules/@protobufjs/base64": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", - "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", - "license": "BSD-3-Clause", - "optional": true - }, - "node_modules/@protobufjs/codegen": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", - "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", - "license": "BSD-3-Clause", - "optional": true - }, - "node_modules/@protobufjs/eventemitter": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", - "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", - "license": "BSD-3-Clause", - "optional": true + "node_modules/@scarf/scarf": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz", + "integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==", + "hasInstallScript": true, + "license": "Apache-2.0" }, - "node_modules/@protobufjs/fetch": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", - "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", - "license": "BSD-3-Clause", - "optional": true, - "dependencies": { - "@protobufjs/aspromise": "^1.1.1", - "@protobufjs/inquire": "^1.1.0" - } - }, - "node_modules/@protobufjs/float": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", - "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", - "license": "BSD-3-Clause", - "optional": true - }, - "node_modules/@protobufjs/inquire": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", - "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", - "license": "BSD-3-Clause", - "optional": true - }, - "node_modules/@protobufjs/path": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", - "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", - "license": "BSD-3-Clause", - "optional": true - }, - "node_modules/@protobufjs/pool": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", - "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", - "license": "BSD-3-Clause", - "optional": true - }, - "node_modules/@protobufjs/utf8": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", - "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", - "license": "BSD-3-Clause", - "optional": true - }, - "node_modules/@scarf/scarf": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz", - "integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==", - "hasInstallScript": true, - "license": "Apache-2.0" - }, - "node_modules/@schematics/angular": { - "version": "19.2.19", - "resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-19.2.19.tgz", - "integrity": "sha512-6/0pvbPCY4UHeB4lnM/5r250QX5gcLgOYbR5FdhFu+22mOPHfWpRc5tNuY9kCephDHzAHjo6fTW1vefOOmA4jw==", - "license": "MIT", + "node_modules/@schematics/angular": { + "version": "19.2.19", + "resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-19.2.19.tgz", + "integrity": "sha512-6/0pvbPCY4UHeB4lnM/5r250QX5gcLgOYbR5FdhFu+22mOPHfWpRc5tNuY9kCephDHzAHjo6fTW1vefOOmA4jw==", + "license": "MIT", "dependencies": { "@angular-devkit/core": "19.2.19", "@angular-devkit/schematics": "19.2.19", @@ -5416,13 +4998,13 @@ } }, "node_modules/@smithy/abort-controller": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.5.tgz", - "integrity": "sha512-j7HwVkBw68YW8UmFRcjZOmssE77Rvk0GWAIN1oFBhsaovQmZWYCIcGa9/pwRB0ExI8Sk9MWNALTjftjHZea7VA==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.6.tgz", + "integrity": "sha512-P7JD4J+wxHMpGxqIg6SHno2tPkZbBUBLbPpR5/T1DEUvw/mEaINBMaPFZNM7lA+ToSCZ36j6nMHa+5kej+fhGg==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5430,17 +5012,17 @@ } }, "node_modules/@smithy/config-resolver": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.3.tgz", - "integrity": "sha512-ezHLe1tKLUxDJo2LHtDuEDyWXolw8WGOR92qb4bQdWq/zKenO5BvctZGrVJBK08zjezSk7bmbKFOXIVyChvDLw==", + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.4.tgz", + "integrity": "sha512-s3U5ChS21DwU54kMmZ0UJumoS5cg0+rGVZvN6f5Lp6EbAVi0ZyP+qDSHdewfmXKUgNK1j3z45JyzulkDukrjAA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", "tslib": "^2.6.2" }, "engines": { @@ -5448,19 +5030,19 @@ } }, "node_modules/@smithy/core": { - "version": "3.18.7", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.18.7.tgz", - "integrity": "sha512-axG9MvKhMWOhFbvf5y2DuyTxQueO0dkedY9QC3mAfndLosRI/9LJv8WaL0mw7ubNhsO4IuXX9/9dYGPFvHrqlw==", + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.19.0.tgz", + "integrity": "sha512-Y9oHXpBcXQgYHOcAEmxjkDilUbSTkgKjoHYed3WaYUH8jngq8lPWDBSpjHblJ9uOgBdy5mh3pzebrScDdYr29w==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/middleware-serde": "^4.2.6", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" @@ -5470,16 +5052,16 @@ } }, "node_modules/@smithy/credential-provider-imds": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.5.tgz", - "integrity": "sha512-BZwotjoZWn9+36nimwm/OLIcVe+KYRwzMjfhd4QT7QxPm9WY0HiOV8t/Wlh+HVUif0SBVV7ksq8//hPaBC/okQ==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.6.tgz", + "integrity": "sha512-xBmawExyTzOjbhzkZwg+vVm/khg28kG+rj2sbGlULjFd1jI70sv/cbpaR0Ev4Yfd6CpDUDRMe64cTqR//wAOyA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "tslib": "^2.6.2" }, "engines": { @@ -5487,15 +5069,15 @@ } }, "node_modules/@smithy/fetch-http-handler": { - "version": "5.3.6", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.6.tgz", - "integrity": "sha512-3+RG3EA6BBJ/ofZUeTFJA7mHfSYrZtQIrDP9dI8Lf7X6Jbos2jptuLrAAteDiFVrmbEmLSuRG/bUKzfAXk7dhg==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.7.tgz", + "integrity": "sha512-fcVap4QwqmzQwQK9QU3keeEpCzTjnP9NJ171vI7GnD7nbkAIcP9biZhDUx88uRH9BabSsQDS0unUps88uZvFIQ==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/querystring-builder": "^4.2.6", + "@smithy/types": "^4.10.0", "@smithy/util-base64": "^4.3.0", "tslib": "^2.6.2" }, @@ -5504,13 +5086,13 @@ } }, "node_modules/@smithy/hash-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.5.tgz", - "integrity": "sha512-DpYX914YOfA3UDT9CN1BM787PcHfWRBB43fFGCYrZFUH0Jv+5t8yYl+Pd5PW4+QzoGEDvn5d5QIO4j2HyYZQSA==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.6.tgz", + "integrity": "sha512-k3Dy9VNR37wfMh2/1RHkFf/e0rMyN0pjY0FdyY6ItJRjENYyVPRMwad6ZR1S9HFm6tTuIOd9pqKBmtJ4VHxvxg==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -5520,13 +5102,13 @@ } }, "node_modules/@smithy/invalid-dependency": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.5.tgz", - "integrity": "sha512-2L2erASEro1WC5nV+plwIMxrTXpvpfzl4e+Nre6vBVRR2HKeGGcvpJyyL3/PpiSg+cJG2KpTmZmq934Olb6e5A==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.6.tgz", + "integrity": "sha512-E4t/V/q2T46RY21fpfznd1iSLTvCXKNKo4zJ1QuEFN4SE9gKfu2vb6bgq35LpufkQ+SETWIC7ZAf2GGvTlBaMQ==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5547,14 +5129,14 @@ } }, "node_modules/@smithy/middleware-content-length": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.5.tgz", - "integrity": "sha512-Y/RabVa5vbl5FuHYV2vUCwvh/dqzrEY/K2yWPSqvhFUwIY0atLqO4TienjBXakoy4zrKAMCZwg+YEqmH7jaN7A==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.6.tgz", + "integrity": "sha512-0cjqjyfj+Gls30ntq45SsBtqF3dfJQCeqQPyGz58Pk8OgrAr5YiB7ZvDzjCA94p4r6DCI4qLm7FKobqBjf515w==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5562,19 +5144,19 @@ } }, "node_modules/@smithy/middleware-endpoint": { - "version": "4.3.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.3.14.tgz", - "integrity": "sha512-v0q4uTKgBM8dsqGjqsabZQyH85nFaTnFcgpWU1uydKFsdyyMzfvOkNum9G7VK+dOP01vUnoZxIeRiJ6uD0kjIg==", + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.0.tgz", + "integrity": "sha512-M6qWfUNny6NFNy8amrCGIb9TfOMUkHVtg9bHtEFGRgfH7A7AtPpn/fcrToGPjVDK1ECuMVvqGQOXcZxmu9K+7A==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/core": "^3.19.0", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", + "@smithy/util-middleware": "^4.2.6", "tslib": "^2.6.2" }, "engines": { @@ -5582,19 +5164,19 @@ } }, "node_modules/@smithy/middleware-retry": { - "version": "4.4.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.14.tgz", - "integrity": "sha512-Z2DG8Ej7FyWG1UA+7HceINtSLzswUgs2np3sZX0YBBxCt+CXG4QUxv88ZDS3+2/1ldW7LqtSY1UO/6VQ1pND8Q==", + "version": "4.4.16", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.16.tgz", + "integrity": "sha512-XPpNhNRzm3vhYm7YCsyw3AtmWggJbg1wNGAoqb7NBYr5XA5isMRv14jgbYyUV6IvbTBFZQdf2QpeW43LrRdStQ==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/service-error-classification": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/service-error-classification": "^4.2.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" }, @@ -5603,14 +5185,14 @@ } }, "node_modules/@smithy/middleware-serde": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.6.tgz", - "integrity": "sha512-VkLoE/z7e2g8pirwisLz8XJWedUSY8my/qrp81VmAdyrhi94T+riBfwP+AOEEFR9rFTSonC/5D2eWNmFabHyGQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.7.tgz", + "integrity": "sha512-PFMVHVPgtFECeu4iZ+4SX6VOQT0+dIpm4jSPLLL6JLSkp9RohGqKBKD0cbiXdeIFS08Forp0UHI6kc0gIHenSA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5618,13 +5200,13 @@ } }, "node_modules/@smithy/middleware-stack": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.5.tgz", - "integrity": "sha512-bYrutc+neOyWxtZdbB2USbQttZN0mXaOyYLIsaTbJhFsfpXyGWUxJpEuO1rJ8IIJm2qH4+xJT0mxUSsEDTYwdQ==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.6.tgz", + "integrity": "sha512-JSbALU3G+JS4kyBZPqnJ3hxIYwOVRV7r9GNQMS6j5VsQDo5+Es5nddLfr9TQlxZLNHPvKSh+XSB0OuWGfSWFcA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5632,15 +5214,15 @@ } }, "node_modules/@smithy/node-config-provider": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.5.tgz", - "integrity": "sha512-UTurh1C4qkVCtqggI36DGbLB2Kv8UlcFdMXDcWMbqVY2uRg0XmT9Pb4Vj6oSQ34eizO1fvR0RnFV4Axw4IrrAg==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.6.tgz", + "integrity": "sha512-fYEyL59Qe82Ha1p97YQTMEQPJYmBS+ux76foqluaTVWoG9Px5J53w6NvXZNE3wP7lIicLDF7Vj1Em18XTX7fsA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5648,16 +5230,16 @@ } }, "node_modules/@smithy/node-http-handler": { - "version": "4.4.5", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.5.tgz", - "integrity": "sha512-CMnzM9R2WqlqXQGtIlsHMEZfXKJVTIrqCNoSd/QpAyp+Dw0a1Vps13l6ma1fH8g7zSPNsA59B/kWgeylFuA/lw==", + "version": "4.4.6", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.6.tgz", + "integrity": "sha512-Gsb9jf4ido5BhPfani4ggyrKDd3ZK+vTFWmUaZeFg5G3E5nhFmqiTzAIbHqmPs1sARuJawDiGMGR/nY+Gw6+aQ==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/querystring-builder": "^4.2.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5665,13 +5247,13 @@ } }, "node_modules/@smithy/property-provider": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.5.tgz", - "integrity": "sha512-8iLN1XSE1rl4MuxvQ+5OSk/Zb5El7NJZ1td6Tn+8dQQHIjp59Lwl6bd0+nzw6SKm2wSSriH2v/I9LPzUic7EOg==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.6.tgz", + "integrity": "sha512-a/tGSLPtaia2krbRdwR4xbZKO8lU67DjMk/jfY4QKt4PRlKML+2tL/gmAuhNdFDioO6wOq0sXkfnddNFH9mNUA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5679,13 +5261,13 @@ } }, "node_modules/@smithy/protocol-http": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.5.tgz", - "integrity": "sha512-RlaL+sA0LNMp03bf7XPbFmT5gN+w3besXSWMkA8rcmxLSVfiEXElQi4O2IWwPfxzcHkxqrwBFMbngB8yx/RvaQ==", + "version": "5.3.6", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.6.tgz", + "integrity": "sha512-qLRZzP2+PqhE3OSwvY2jpBbP0WKTZ9opTsn+6IWYI0SKVpbG+imcfNxXPq9fj5XeaUTr7odpsNpK6dmoiM1gJQ==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5693,13 +5275,13 @@ } }, "node_modules/@smithy/querystring-builder": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.5.tgz", - "integrity": "sha512-y98otMI1saoajeik2kLfGyRp11e5U/iJYH/wLCh3aTV/XutbGT9nziKGkgCaMD1ghK7p6htHMm6b6scl9JRUWg==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.6.tgz", + "integrity": "sha512-MeM9fTAiD3HvoInK/aA8mgJaKQDvm8N0dKy6EiFaCfgpovQr4CaOkJC28XqlSRABM+sHdSQXbC8NZ0DShBMHqg==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "@smithy/util-uri-escape": "^4.2.0", "tslib": "^2.6.2" }, @@ -5708,13 +5290,13 @@ } }, "node_modules/@smithy/querystring-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.5.tgz", - "integrity": "sha512-031WCTdPYgiQRYNPXznHXof2YM0GwL6SeaSyTH/P72M1Vz73TvCNH2Nq8Iu2IEPq9QP2yx0/nrw5YmSeAi/AjQ==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.6.tgz", + "integrity": "sha512-YmWxl32SQRw/kIRccSOxzS/Ib8/b5/f9ex0r5PR40jRJg8X1wgM3KrR2In+8zvOGVhRSXgvyQpw9yOSlmfmSnA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5722,26 +5304,26 @@ } }, "node_modules/@smithy/service-error-classification": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.5.tgz", - "integrity": "sha512-8fEvK+WPE3wUAcDvqDQG1Vk3ANLR8Px979te96m84CbKAjBVf25rPYSzb4xU4hlTyho7VhOGnh5i62D/JVF0JQ==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.6.tgz", + "integrity": "sha512-Q73XBrzJlGTut2nf5RglSntHKgAG0+KiTJdO5QQblLfr4TdliGwIAha1iZIjwisc3rA5ulzqwwsYC6xrclxVQg==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0" + "@smithy/types": "^4.10.0" }, "engines": { "node": ">=18.0.0" } }, "node_modules/@smithy/shared-ini-file-loader": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.0.tgz", - "integrity": "sha512-5WmZ5+kJgJDjwXXIzr1vDTG+RhF9wzSODQBfkrQ2VVkYALKGvZX1lgVSxEkgicSAFnFhPj5rudJV0zoinqS0bA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.1.tgz", + "integrity": "sha512-tph+oQYPbpN6NamF030hx1gb5YN2Plog+GLaRHpoEDwp8+ZPG26rIJvStG9hkWzN2HBn3HcWg0sHeB0tmkYzqA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5749,17 +5331,17 @@ } }, "node_modules/@smithy/signature-v4": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.5.tgz", - "integrity": "sha512-xSUfMu1FT7ccfSXkoLl/QRQBi2rOvi3tiBZU2Tdy3I6cgvZ6SEi9QNey+lqps/sJRnogIS+lq+B1gxxbra2a/w==", + "version": "5.3.6", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.6.tgz", + "integrity": "sha512-P1TXDHuQMadTMTOBv4oElZMURU4uyEhxhHfn+qOc2iofW9Rd4sZtBGx58Lzk112rIGVEYZT8eUMK4NftpewpRA==", "dev": true, "license": "Apache-2.0", "dependencies": { "@smithy/is-array-buffer": "^4.2.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "@smithy/util-hex-encoding": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.6", "@smithy/util-uri-escape": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -5769,18 +5351,18 @@ } }, "node_modules/@smithy/smithy-client": { - "version": "4.9.10", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.9.10.tgz", - "integrity": "sha512-Jaoz4Jw1QYHc1EFww/E6gVtNjhoDU+gwRKqXP6C3LKYqqH2UQhP8tMP3+t/ePrhaze7fhLE8vS2q6vVxBANFTQ==", + "version": "4.10.1", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.10.1.tgz", + "integrity": "sha512-1ovWdxzYprhq+mWqiGZlt3kF69LJthuQcfY9BIyHx9MywTFKzFapluku1QXoaBB43GCsLDxNqS+1v30ure69AA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@smithy/core": "^3.19.0", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", + "@smithy/util-stream": "^4.5.7", "tslib": "^2.6.2" }, "engines": { @@ -5788,9 +5370,9 @@ } }, "node_modules/@smithy/types": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.9.0.tgz", - "integrity": "sha512-MvUbdnXDTwykR8cB1WZvNNwqoWVaTRA0RLlLmf/cIFNMM2cKWz01X4Ly6SMC4Kks30r8tT3Cty0jmeWfiuyHTA==", + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.10.0.tgz", + "integrity": "sha512-K9mY7V/f3Ul+/Gz4LJANZ3vJ/yiBIwCyxe0sPT4vNJK63Srvd+Yk1IzP0t+nE7XFSpIGtzR71yljtnqpUTYFlQ==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -5801,14 +5383,14 @@ } }, "node_modules/@smithy/url-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.5.tgz", - "integrity": "sha512-VaxMGsilqFnK1CeBX+LXnSuaMx4sTL/6znSZh2829txWieazdVxr54HmiyTsIbpOTLcf5nYpq9lpzmwRdxj6rQ==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.6.tgz", + "integrity": "sha512-tVoyzJ2vXp4R3/aeV4EQjBDmCuWxRa8eo3KybL7Xv4wEM16nObYh7H1sNfcuLWHAAAzb0RVyxUz1S3sGj4X+Tg==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/querystring-parser": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/querystring-parser": "^4.2.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5884,15 +5466,15 @@ } }, "node_modules/@smithy/util-defaults-mode-browser": { - "version": "4.3.13", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.13.tgz", - "integrity": "sha512-hlVLdAGrVfyNei+pKIgqDTxfu/ZI2NSyqj4IDxKd5bIsIqwR/dSlkxlPaYxFiIaDVrBy0he8orsFy+Cz119XvA==", + "version": "4.3.15", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.15.tgz", + "integrity": "sha512-LiZQVAg/oO8kueX4c+oMls5njaD2cRLXRfcjlTYjhIqmwHnCwkQO5B3dMQH0c5PACILxGAQf6Mxsq7CjlDc76A==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5900,18 +5482,18 @@ } }, "node_modules/@smithy/util-defaults-mode-node": { - "version": "4.2.16", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.16.tgz", - "integrity": "sha512-F1t22IUiJLHrxW9W1CQ6B9PN+skZ9cqSuzB18Eh06HrJPbjsyZ7ZHecAKw80DQtyGTRcVfeukKaCRYebFwclbg==", + "version": "4.2.18", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.18.tgz", + "integrity": "sha512-Kw2J+KzYm9C9Z9nY6+W0tEnoZOofstVCMTshli9jhQbQCy64rueGfKzPfuFBnVUqZD9JobxTh2DzHmPkp/Va/Q==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/config-resolver": "^4.4.3", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5919,14 +5501,14 @@ } }, "node_modules/@smithy/util-endpoints": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.5.tgz", - "integrity": "sha512-3O63AAWu2cSNQZp+ayl9I3NapW1p1rR5mlVHcF6hAB1dPZUQFfRPYtplWX/3xrzWthPGj5FqB12taJJCfH6s8A==", + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.6.tgz", + "integrity": "sha512-v60VNM2+mPvgHCBXEfMCYrQ0RepP6u6xvbAkMenfe4Mi872CqNkJzgcnQL837e8NdeDxBgrWQRTluKq5Lqdhfg==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5947,13 +5529,13 @@ } }, "node_modules/@smithy/util-middleware": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.5.tgz", - "integrity": "sha512-6Y3+rvBF7+PZOc40ybeZMcGln6xJGVeY60E7jy9Mv5iKpMJpHgRE6dKy9ScsVxvfAYuEX4Q9a65DQX90KaQ3bA==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.6.tgz", + "integrity": "sha512-qrvXUkxBSAFomM3/OEMuDVwjh4wtqK8D2uDZPShzIqOylPst6gor2Cdp6+XrH4dyksAWq/bE2aSDYBTTnj0Rxg==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5961,14 +5543,14 @@ } }, "node_modules/@smithy/util-retry": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.5.tgz", - "integrity": "sha512-GBj3+EZBbN4NAqJ/7pAhsXdfzdlznOh8PydUijy6FpNIMnHPSMO2/rP4HKu+UFeikJxShERk528oy7GT79YiJg==", + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.6.tgz", + "integrity": "sha512-x7CeDQLPQ9cb6xN7fRJEjlP9NyGW/YeXWc4j/RUhg4I+H60F0PEeRc2c/z3rm9zmsdiMFzpV/rT+4UHW6KM1SA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/service-error-classification": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/service-error-classification": "^4.2.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -5976,15 +5558,15 @@ } }, "node_modules/@smithy/util-stream": { - "version": "4.5.6", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.6.tgz", - "integrity": "sha512-qWw/UM59TiaFrPevefOZ8CNBKbYEP6wBAIlLqxn3VAIo9rgnTNc4ASbVrqDmhuwI87usnjhdQrxodzAGFFzbRQ==", + "version": "4.5.7", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.7.tgz", + "integrity": "sha512-Uuy4S5Aj4oF6k1z+i2OtIBJUns4mlg29Ph4S+CqjR+f4XXpSFVgTCYLzMszHJTicYDBxKFtwq2/QSEDSS5l02A==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/types": "^4.9.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/types": "^4.10.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-hex-encoding": "^4.2.0", @@ -6080,16 +5662,6 @@ "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", "license": "MIT" }, - "node_modules/@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "license": "MIT", - "optional": true, - "engines": { - "node": ">= 10" - } - }, "node_modules/@tsconfig/node10": { "version": "1.0.12", "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.12.tgz", @@ -6188,6 +5760,7 @@ "version": "1.19.6", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz", "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==", + "dev": true, "license": "MIT", "dependencies": { "@types/connect": "*", @@ -6204,13 +5777,6 @@ "@types/redis": "^2.8.0" } }, - "node_modules/@types/caseless": { - "version": "0.12.5", - "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", - "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==", - "license": "MIT", - "optional": true - }, "node_modules/@types/command-line-args": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/@types/command-line-args/-/command-line-args-5.2.3.tgz", @@ -6227,6 +5793,7 @@ "version": "3.4.38", "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "dev": true, "license": "MIT", "dependencies": { "@types/node": "*" @@ -6271,7 +5838,6 @@ "integrity": "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@types/estree": "*", "@types/json-schema": "*" @@ -6301,7 +5867,6 @@ "integrity": "sha512-sKYVuV7Sv9fbPIt/442koC7+IIwK5olP1KWeD88e/idgoJqDm3JV/YUiPwkoKK92ylff2MGxSz1CSjsXelx0YA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@types/body-parser": "*", "@types/express-serve-static-core": "^5.0.0", @@ -6325,6 +5890,7 @@ "version": "2.0.5", "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz", "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==", + "dev": true, "license": "MIT" }, "node_modules/@types/inquirer": { @@ -6402,13 +5968,6 @@ "@types/node": "*" } }, - "node_modules/@types/long": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", - "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==", - "license": "MIT", - "optional": true - }, "node_modules/@types/luxon": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-3.7.1.tgz", @@ -6422,12 +5981,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/mime": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", - "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", - "license": "MIT" - }, "node_modules/@types/mjml": { "version": "4.7.4", "resolved": "https://registry.npmjs.org/@types/mjml/-/mjml-4.7.4.tgz", @@ -6462,11 +6015,10 @@ } }, "node_modules/@types/node": { - "version": "22.19.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.2.tgz", - "integrity": "sha512-LPM2G3Syo1GLzXLGJAKdqoU35XvrWzGJ21/7sgZTUpbkBaOasTj8tjwn6w+hCkqaa1TfJ/w67rJSwYItlJ2mYw==", + "version": "22.19.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.3.tgz", + "integrity": "sha512-1N9SBnWYOJTrNZCdh/yJE+t910Y128BoyY+zBLWhL3r0TYzlTmFdXrPwHL9DyFZmlEXNQQolTZh3KHV31QDhyA==", "license": "MIT", - "peer": true, "dependencies": { "undici-types": "~6.21.0" } @@ -6559,12 +6111,14 @@ "version": "6.14.0", "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==", + "dev": true, "license": "MIT" }, "node_modules/@types/range-parser": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "dev": true, "license": "MIT" }, "node_modules/@types/redis": { @@ -6576,60 +6130,6 @@ "@types/node": "*" } }, - "node_modules/@types/request": { - "version": "2.48.13", - "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.13.tgz", - "integrity": "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==", - "license": "MIT", - "optional": true, - "dependencies": { - "@types/caseless": "*", - "@types/node": "*", - "@types/tough-cookie": "*", - "form-data": "^2.5.5" - } - }, - "node_modules/@types/request/node_modules/form-data": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz", - "integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==", - "license": "MIT", - "optional": true, - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "es-set-tostringtag": "^2.1.0", - "hasown": "^2.0.2", - "mime-types": "^2.1.35", - "safe-buffer": "^5.2.1" - }, - "engines": { - "node": ">= 0.12" - } - }, - "node_modules/@types/request/node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "license": "MIT", - "optional": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/@types/request/node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "license": "MIT", - "optional": true, - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, "node_modules/@types/send": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@types/send/-/send-1.2.1.tgz", @@ -6723,17 +6223,11 @@ "resolved": "https://registry.npmjs.org/@types/through/-/through-0.0.33.tgz", "integrity": "sha512-HsJ+z3QuETzP3cswwtzt2vEIiHBk/dCcHGhbmG5X3ecnwFD/lPrMpliGXxSCg03L9AhrdwA4Oz/qfspkDW+xGQ==", "license": "MIT", + "peer": true, "dependencies": { "@types/node": "*" } }, - "node_modules/@types/tough-cookie": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", - "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==", - "license": "MIT", - "optional": true - }, "node_modules/@types/tunnel-ssh": { "version": "5.0.4", "resolved": "https://registry.npmjs.org/@types/tunnel-ssh/-/tunnel-ssh-5.0.4.tgz", @@ -6744,6 +6238,13 @@ "@types/ssh2": "*" } }, + "node_modules/@types/twitter-text": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/@types/twitter-text/-/twitter-text-3.1.10.tgz", + "integrity": "sha512-+wF6TYQtvokyCc42VKF9OAvEgro0JIAEMor+A7eZsZtkgD/LPAIJx5+g7529nQUzRpas2hlmJEPfZgkzxr0xnA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/validator": { "version": "13.15.10", "resolved": "https://registry.npmjs.org/@types/validator/-/validator-13.15.10.tgz", @@ -6783,17 +6284,17 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.49.0.tgz", - "integrity": "sha512-JXij0vzIaTtCwu6SxTh8qBc66kmf1xs7pI4UOiMDFVct6q86G0Zs7KRcEoJgY3Cav3x5Tq0MF5jwgpgLqgKG3A==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.50.0.tgz", + "integrity": "sha512-O7QnmOXYKVtPrfYzMolrCTfkezCJS9+ljLdKW/+DCvRsc3UAz+sbH6Xcsv7p30+0OwUbeWfUDAQE0vpabZ3QLg==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.49.0", - "@typescript-eslint/type-utils": "8.49.0", - "@typescript-eslint/utils": "8.49.0", - "@typescript-eslint/visitor-keys": "8.49.0", + "@typescript-eslint/scope-manager": "8.50.0", + "@typescript-eslint/type-utils": "8.50.0", + "@typescript-eslint/utils": "8.50.0", + "@typescript-eslint/visitor-keys": "8.50.0", "ignore": "^7.0.0", "natural-compare": "^1.4.0", "ts-api-utils": "^2.1.0" @@ -6806,7 +6307,7 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.49.0", + "@typescript-eslint/parser": "^8.50.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } @@ -6822,17 +6323,16 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.49.0.tgz", - "integrity": "sha512-N9lBGA9o9aqb1hVMc9hzySbhKibHmB+N3IpoShyV6HyQYRGIhlrO5rQgttypi+yEeKsKI4idxC8Jw6gXKD4THA==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.50.0.tgz", + "integrity": "sha512-6/cmF2piao+f6wSxUsJLZjck7OQsYyRtcOZS02k7XINSNlz93v6emM8WutDQSXnroG2xwYlEVHJI+cPA7CPM3Q==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { - "@typescript-eslint/scope-manager": "8.49.0", - "@typescript-eslint/types": "8.49.0", - "@typescript-eslint/typescript-estree": "8.49.0", - "@typescript-eslint/visitor-keys": "8.49.0", + "@typescript-eslint/scope-manager": "8.50.0", + "@typescript-eslint/types": "8.50.0", + "@typescript-eslint/typescript-estree": "8.50.0", + "@typescript-eslint/visitor-keys": "8.50.0", "debug": "^4.3.4" }, "engines": { @@ -6848,14 +6348,14 @@ } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.49.0.tgz", - "integrity": "sha512-/wJN0/DKkmRUMXjZUXYZpD1NEQzQAAn9QWfGwo+Ai8gnzqH7tvqS7oNVdTjKqOcPyVIdZdyCMoqN66Ia789e7g==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.50.0.tgz", + "integrity": "sha512-Cg/nQcL1BcoTijEWyx4mkVC56r8dj44bFDvBdygifuS20f3OZCHmFbjF34DPSi07kwlFvqfv/xOLnJ5DquxSGQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.49.0", - "@typescript-eslint/types": "^8.49.0", + "@typescript-eslint/tsconfig-utils": "^8.50.0", + "@typescript-eslint/types": "^8.50.0", "debug": "^4.3.4" }, "engines": { @@ -6870,14 +6370,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.49.0.tgz", - "integrity": "sha512-npgS3zi+/30KSOkXNs0LQXtsg9ekZ8OISAOLGWA/ZOEn0ZH74Ginfl7foziV8DT+D98WfQ5Kopwqb/PZOaIJGg==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.50.0.tgz", + "integrity": "sha512-xCwfuCZjhIqy7+HKxBLrDVT5q/iq7XBVBXLn57RTIIpelLtEIZHXAF/Upa3+gaCpeV1NNS5Z9A+ID6jn50VD4A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.49.0", - "@typescript-eslint/visitor-keys": "8.49.0" + "@typescript-eslint/types": "8.50.0", + "@typescript-eslint/visitor-keys": "8.50.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -6888,9 +6388,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.49.0.tgz", - "integrity": "sha512-8prixNi1/6nawsRYxet4YOhnbW+W9FK/bQPxsGB1D3ZrDzbJ5FXw5XmzxZv82X3B+ZccuSxo/X8q9nQ+mFecWA==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.50.0.tgz", + "integrity": "sha512-vxd3G/ybKTSlm31MOA96gqvrRGv9RJ7LGtZCn2Vrc5htA0zCDvcMqUkifcjrWNNKXHUU3WCkYOzzVSFBd0wa2w==", "dev": true, "license": "MIT", "engines": { @@ -6905,15 +6405,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.49.0.tgz", - "integrity": "sha512-KTExJfQ+svY8I10P4HdxKzWsvtVnsuCifU5MvXrRwoP2KOlNZ9ADNEWWsQTJgMxLzS5VLQKDjkCT/YzgsnqmZg==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.50.0.tgz", + "integrity": "sha512-7OciHT2lKCewR0mFoBrvZJ4AXTMe/sYOe87289WAViOocEmDjjv8MvIOT2XESuKj9jp8u3SZYUSh89QA4S1kQw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.49.0", - "@typescript-eslint/typescript-estree": "8.49.0", - "@typescript-eslint/utils": "8.49.0", + "@typescript-eslint/types": "8.50.0", + "@typescript-eslint/typescript-estree": "8.50.0", + "@typescript-eslint/utils": "8.50.0", "debug": "^4.3.4", "ts-api-utils": "^2.1.0" }, @@ -6930,9 +6430,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.49.0.tgz", - "integrity": "sha512-e9k/fneezorUo6WShlQpMxXh8/8wfyc+biu6tnAqA81oWrEic0k21RHzP9uqqpyBBeBKu4T+Bsjy9/b8u7obXQ==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.50.0.tgz", + "integrity": "sha512-iX1mgmGrXdANhhITbpp2QQM2fGehBse9LbTf0sidWK6yg/NE+uhV5dfU1g6EYPlcReYmkE9QLPq/2irKAmtS9w==", "dev": true, "license": "MIT", "engines": { @@ -6944,16 +6444,16 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.49.0.tgz", - "integrity": "sha512-jrLdRuAbPfPIdYNppHJ/D0wN+wwNfJ32YTAm10eJVsFmrVpXQnDWBn8niCSMlWjvml8jsce5E/O+86IQtTbJWA==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.50.0.tgz", + "integrity": "sha512-W7SVAGBR/IX7zm1t70Yujpbk+zdPq/u4soeFSknWFdXIFuWsBGBOUu/Tn/I6KHSKvSh91OiMuaSnYp3mtPt5IQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.49.0", - "@typescript-eslint/tsconfig-utils": "8.49.0", - "@typescript-eslint/types": "8.49.0", - "@typescript-eslint/visitor-keys": "8.49.0", + "@typescript-eslint/project-service": "8.50.0", + "@typescript-eslint/tsconfig-utils": "8.50.0", + "@typescript-eslint/types": "8.50.0", + "@typescript-eslint/visitor-keys": "8.50.0", "debug": "^4.3.4", "minimatch": "^9.0.4", "semver": "^7.6.0", @@ -6998,16 +6498,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.49.0.tgz", - "integrity": "sha512-N3W7rJw7Rw+z1tRsHZbK395TWSYvufBXumYtEGzypgMUthlg0/hmCImeA8hgO2d2G4pd7ftpxxul2J8OdtdaFA==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.50.0.tgz", + "integrity": "sha512-87KgUXET09CRjGCi2Ejxy3PULXna63/bMYv72tCAlDJC3Yqwln0HiFJ3VJMst2+mEtNtZu5oFvX4qJGjKsnAgg==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", - "@typescript-eslint/scope-manager": "8.49.0", - "@typescript-eslint/types": "8.49.0", - "@typescript-eslint/typescript-estree": "8.49.0" + "@typescript-eslint/scope-manager": "8.50.0", + "@typescript-eslint/types": "8.50.0", + "@typescript-eslint/typescript-estree": "8.50.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -7022,13 +6522,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.49.0.tgz", - "integrity": "sha512-LlKaciDe3GmZFphXIc79THF/YYBugZ7FS1pO581E/edlVVNbZKDy93evqmrfQ9/Y4uN0vVhX4iuchq26mK/iiA==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.50.0.tgz", + "integrity": "sha512-Xzmnb58+Db78gT/CCj/PVCvK+zxbnsw6F+O1oheYszJbBSdEjVhQi3C/Xttzxgi/GLmpvOggRs1RFpiJ8+c34Q==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.49.0", + "@typescript-eslint/types": "8.50.0", "eslint-visitor-keys": "^4.2.1" }, "engines": { @@ -7505,9 +7005,9 @@ "license": "Apache-2.0" }, "node_modules/@zone-eu/mailsplit": { - "version": "5.4.7", - "resolved": "https://registry.npmjs.org/@zone-eu/mailsplit/-/mailsplit-5.4.7.tgz", - "integrity": "sha512-jApX86aDgolMz08pP20/J2zcns02NSK3zSiYouf01QQg4250L+GUAWSWicmS7eRvs+Z7wP7QfXrnkaTBGrIpwQ==", + "version": "5.4.8", + "resolved": "https://registry.npmjs.org/@zone-eu/mailsplit/-/mailsplit-5.4.8.tgz", + "integrity": "sha512-eEyACj4JZ7sjzRvy26QhLgKEMWwQbsw1+QZnlLX+/gihcNH07lVPOcnwf5U6UAL7gkc//J3jVd76o/WS+taUiA==", "license": "(MIT OR EUPL-1.1+)", "optional": true, "dependencies": { @@ -7557,7 +7057,6 @@ "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "devOptional": true, "license": "MIT", - "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -7610,6 +7109,16 @@ "node": ">=0.8" } }, + "node_modules/adm-zip": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.16.tgz", + "integrity": "sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0" + } + }, "node_modules/agent-base": { "version": "7.1.4", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", @@ -7637,7 +7146,6 @@ "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -7866,9 +7374,9 @@ } }, "node_modules/apache-arrow/node_modules/@types/node": { - "version": "24.10.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.2.tgz", - "integrity": "sha512-WOhQTZ4G8xZ1tjJTvKOpyEVSGgOTvJAfDK3FNFgELyaTpzhdgHVHeqW8V+UJvzF5BT+/B54T/1S2K6gd9c7bbA==", + "version": "24.10.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.4.tgz", + "integrity": "sha512-vnDVpYPMzs4wunl27jHrfmwojOGKya0xyM3sH+UE5iv5uPS6vX7UIoh6m+vQc5LGBq52HBKPIn/zcSZVzeDEZg==", "license": "MIT", "dependencies": { "undici-types": "~7.16.0" @@ -7924,16 +7432,6 @@ "dev": true, "license": "MIT" }, - "node_modules/arrify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", - "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", - "license": "MIT", - "optional": true, - "engines": { - "node": ">=8" - } - }, "node_modules/asap": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", @@ -7962,16 +7460,6 @@ "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", "integrity": "sha512-eAkdoKxU6/LkKDBzLpT+t6Ff5EtfSF4wx1WfJiPEEV7WNLnDaRXk0oVysiEPm262roaachGexwUv94WhSgN5TQ==" }, - "node_modules/async-retry": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", - "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", - "license": "MIT", - "optional": true, - "dependencies": { - "retry": "0.13.1" - } - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -8004,6 +7492,21 @@ "proxy-from-env": "^1.1.0" } }, + "node_modules/b4a": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.7.3.tgz", + "integrity": "sha512-5Q2mfq2WfGuFp3uS//0s6baOJLMoVduPYVeNmDYxu5OUA1/cBfvr2RIS7vi62LdNj/urk1hfmj867I3qt6uZ7Q==", + "dev": true, + "license": "Apache-2.0", + "peerDependencies": { + "react-native-b4a": "*" + }, + "peerDependenciesMeta": { + "react-native-b4a": { + "optional": true + } + } + }, "node_modules/babel-jest": { "version": "30.2.0", "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.2.0.tgz", @@ -8122,6 +7625,21 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "license": "MIT" }, + "node_modules/bare-events": { + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.8.2.tgz", + "integrity": "sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ==", + "dev": true, + "license": "Apache-2.0", + "peerDependencies": { + "bare-abort-controller": "*" + }, + "peerDependenciesMeta": { + "bare-abort-controller": { + "optional": true + } + } + }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -8161,9 +7679,9 @@ } }, "node_modules/baseline-browser-mapping": { - "version": "2.9.5", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.5.tgz", - "integrity": "sha512-D5vIoztZOq1XM54LUdttJVc96ggEsIfju2JBvht06pSzpckp3C7HReun67Bghzrtdsq9XdMGbSSB3v3GhMNmAA==", + "version": "2.9.7", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.7.tgz", + "integrity": "sha512-k9xFKplee6KIio3IDbwj+uaCLpqzOwakOgmqzPezM0sFJlFKcg30vk2wOiAJtkTSfx0SSQDSe8q+mWA/fSH5Zg==", "dev": true, "license": "Apache-2.0", "bin": { @@ -8398,7 +7916,6 @@ } ], "license": "MIT", - "peer": true, "dependencies": { "baseline-browser-mapping": "^2.9.0", "caniuse-lite": "^1.0.30001759", @@ -8501,7 +8018,6 @@ "resolved": "https://registry.npmjs.org/bull/-/bull-4.16.5.tgz", "integrity": "sha512-lDsx2BzkKe7gkCYiT5Acj02DpTwDznl/VNN7Psn7M3USPG7Vs/BaClZJJTAG+ufAR9++N1/NiUTdaFBWDIl5TQ==", "license": "MIT", - "peer": true, "dependencies": { "cron-parser": "^4.9.0", "get-port": "^5.1.1", @@ -8613,9 +8129,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001759", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001759.tgz", - "integrity": "sha512-Pzfx9fOKoKvevQf8oCXoyNRQ5QyxJj+3O0Rqx2V5oxT61KGx8+n6hV/IUyJeifUci2clnmmKVpvtiqRzgiWjSw==", + "version": "1.0.30001760", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001760.tgz", + "integrity": "sha512-7AAMPcueWELt1p3mi13HR/LHH0TJLT11cnwDJEs3xA4+CK/PLKeO9Kl1oru24htkyUKtkGCvAx4ohB0Ttry8Dw==", "dev": true, "funding": [ { @@ -8759,7 +8275,6 @@ "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", "devOptional": true, "license": "MIT", - "peer": true, "dependencies": { "readdirp": "^4.0.1" }, @@ -8807,15 +8322,13 @@ "version": "0.5.1", "resolved": "https://registry.npmjs.org/class-transformer/-/class-transformer-0.5.1.tgz", "integrity": "sha512-SQa1Ws6hUbfC98vKGxZH3KFY0Y1lm5Zm0SY8XX9zbK7FJCyVEac3ATW0RIpwzW+oOfmHE5PMPufDG9hCfoEOMw==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/class-validator": { "version": "0.14.3", "resolved": "https://registry.npmjs.org/class-validator/-/class-validator-0.14.3.tgz", "integrity": "sha512-rXXekcjofVN1LTOSw+u4u9WXVEUvNBVjORW154q/IdmYWy1nMbOU9aNtZB0t8m+FJQ9q91jlr2f9CwwUFdFMRA==", "license": "MIT", - "peer": true, "dependencies": { "@types/validator": "^13.15.3", "libphonenumber-js": "^1.11.1", @@ -9270,6 +8783,14 @@ "dev": true, "license": "MIT" }, + "node_modules/core-js": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", + "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==", + "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. Please, upgrade your dependencies to the actual version of core-js.", + "hasInstallScript": true, + "license": "MIT" + }, "node_modules/core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", @@ -9725,19 +9246,6 @@ "node": ">= 0.4" } }, - "node_modules/duplexify": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", - "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", - "license": "MIT", - "optional": true, - "dependencies": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.2" - } - }, "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", @@ -9875,16 +9383,6 @@ "node": ">=8.10.0" } }, - "node_modules/end-of-stream": { - "version": "1.4.5", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", - "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", - "license": "MIT", - "optional": true, - "dependencies": { - "once": "^1.4.0" - } - }, "node_modules/engine.io": { "version": "6.6.4", "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.6.4.tgz", @@ -9975,9 +9473,9 @@ } }, "node_modules/enhanced-resolve": { - "version": "5.18.3", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", - "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", + "version": "5.18.4", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.4.tgz", + "integrity": "sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q==", "dev": true, "license": "MIT", "dependencies": { @@ -10014,6 +9512,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/err-code": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", + "license": "MIT" + }, "node_modules/error-ex": { "version": "1.3.4", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", @@ -10127,12 +9631,11 @@ } }, "node_modules/eslint": { - "version": "9.39.1", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.1.tgz", - "integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==", + "version": "9.39.2", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz", + "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", @@ -10140,7 +9643,7 @@ "@eslint/config-helpers": "^0.4.2", "@eslint/core": "^0.17.0", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.39.1", + "@eslint/js": "9.39.2", "@eslint/plugin-kit": "^0.4.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", @@ -10193,7 +9696,6 @@ "integrity": "sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==", "dev": true, "license": "MIT", - "peer": true, "bin": { "eslint-config-prettier": "bin/cli.js" }, @@ -10377,6 +9879,16 @@ "node": ">=0.8.x" } }, + "node_modules/events-universal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/events-universal/-/events-universal-1.0.1.tgz", + "integrity": "sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bare-events": "^2.7.0" + } + }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -10436,11 +9948,26 @@ "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, + "node_modules/expo-server-sdk": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/expo-server-sdk/-/expo-server-sdk-4.0.0.tgz", + "integrity": "sha512-zi83XtG2pqyP3gyn1JIRYkydo2i6HU3CYaWo/VvhZG/F29U+QIDv6LBEUsWf4ddZlVE7c9WN1N8Be49rHgO8OQ==", + "license": "MIT", + "dependencies": { + "node-fetch": "^2.6.0", + "promise-limit": "^2.7.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": ">=20" + } + }, "node_modules/express": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/express/-/express-5.2.1.tgz", "integrity": "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==", "license": "MIT", + "peer": true, "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.1", @@ -10484,6 +10011,7 @@ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", "license": "MIT", + "peer": true, "engines": { "node": ">=6.6.0" } @@ -10501,15 +10029,6 @@ "license": "MIT", "optional": true }, - "node_modules/farmhash-modern": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/farmhash-modern/-/farmhash-modern-1.1.0.tgz", - "integrity": "sha512-6ypT4XfgqJk/F3Yuv4SX26I3doUjt0GTG4a+JgWxXQpxXzTBq8fPUeGHfcYMMDPHJHm3yPOSjaeBwBGAHWXCdA==", - "license": "MIT", - "engines": { - "node": ">=18.0.0" - } - }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -10523,6 +10042,13 @@ "dev": true, "license": "Apache-2.0" }, + "node_modules/fast-fifo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==", + "dev": true, + "license": "MIT" + }, "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", @@ -10560,9 +10086,9 @@ "license": "BSD-3-Clause" }, "node_modules/fast-xml-parser": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.2.tgz", - "integrity": "sha512-n8v8b6p4Z1sMgqRmqLJm3awW4NX7NkaKPfb3uJIBTSH7Pdvufi3PQ3/lJLQrvxcMYl7JI2jnDO90siPEpD8JBA==", + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.3.tgz", + "integrity": "sha512-2O3dkPAAC6JavuMm8+4+pgTk+5hoAs+CjZ+sWcQLkX9+/tHRuTkQh/Oaifr8qDmZ8iEHb771Ea6G8CdwkrgvYA==", "funding": [ { "type": "github", @@ -10577,18 +10103,6 @@ "fxparser": "src/cli/cli.js" } }, - "node_modules/faye-websocket": { - "version": "0.11.4", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", - "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", - "license": "Apache-2.0", - "dependencies": { - "websocket-driver": ">=0.5.1" - }, - "engines": { - "node": ">=0.8.0" - } - }, "node_modules/fb-watchman": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", @@ -10811,98 +10325,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/firebase-admin": { - "version": "13.6.0", - "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-13.6.0.tgz", - "integrity": "sha512-GdPA/t0+Cq8p1JnjFRBmxRxAGvF/kl2yfdhALl38PrRp325YxyQ5aNaHui0XmaKcKiGRFIJ/EgBNWFoDP0onjw==", - "license": "Apache-2.0", - "dependencies": { - "@fastify/busboy": "^3.0.0", - "@firebase/database-compat": "^2.0.0", - "@firebase/database-types": "^1.0.6", - "@types/node": "^22.8.7", - "farmhash-modern": "^1.1.0", - "fast-deep-equal": "^3.1.1", - "google-auth-library": "^9.14.2", - "jsonwebtoken": "^9.0.0", - "jwks-rsa": "^3.1.0", - "node-forge": "^1.3.1", - "uuid": "^11.0.2" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@google-cloud/firestore": "^7.11.0", - "@google-cloud/storage": "^7.14.0" - } - }, - "node_modules/firebase-admin/node_modules/gcp-metadata": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.1.tgz", - "integrity": "sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A==", - "license": "Apache-2.0", - "dependencies": { - "gaxios": "^6.1.1", - "google-logging-utils": "^0.0.2", - "json-bigint": "^1.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/firebase-admin/node_modules/google-auth-library": { - "version": "9.15.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.1.tgz", - "integrity": "sha512-Jb6Z0+nvECVz+2lzSMt9u98UsoakXxA2HGHMCxh+so3n90XgYWkq5dur19JAJV7ONiJY22yBTyJB1TSkvPq9Ng==", - "license": "Apache-2.0", - "dependencies": { - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "gaxios": "^6.1.1", - "gcp-metadata": "^6.1.0", - "gtoken": "^7.0.0", - "jws": "^4.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/firebase-admin/node_modules/google-logging-utils": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-0.0.2.tgz", - "integrity": "sha512-NEgUnEcBiP5HrPzufUkBzJOD/Sxsco3rLNo1F1TNf7ieU8ryUzBhqba8r756CjLX7rn3fHl6iLEwPYuqpoKgQQ==", - "license": "Apache-2.0", - "engines": { - "node": ">=14" - } - }, - "node_modules/firebase-admin/node_modules/gtoken": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", - "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", - "license": "MIT", - "dependencies": { - "gaxios": "^6.0.0", - "jws": "^4.0.0" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/firebase-admin/node_modules/uuid": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", - "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/esm/bin/uuid" - } - }, "node_modules/fixpack": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/fixpack/-/fixpack-4.0.0.tgz", @@ -11232,57 +10654,7 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/functional-red-black-tree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", - "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", - "license": "MIT", - "optional": true - }, "node_modules/gaxios": { - "version": "6.7.1", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", - "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", - "license": "Apache-2.0", - "dependencies": { - "extend": "^3.0.2", - "https-proxy-agent": "^7.0.1", - "is-stream": "^2.0.0", - "node-fetch": "^2.6.9", - "uuid": "^9.0.1" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/gaxios/node_modules/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/gcp-metadata": { - "version": "8.1.2", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-8.1.2.tgz", - "integrity": "sha512-zV/5HKTfCeKWnxG0Dmrw51hEWFGfcF2xiXqcA3+J90WDuP0SvoiSO5ORvcBsifmx/FoIjgQN3oNOGaQ5PhLFkg==", - "license": "Apache-2.0", - "dependencies": { - "gaxios": "^7.0.0", - "google-logging-utils": "^1.0.0", - "json-bigint": "^1.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/gcp-metadata/node_modules/gaxios": { "version": "7.1.3", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-7.1.3.tgz", "integrity": "sha512-YGGyuEdVIjqxkxVH1pUTMY/XtmmsApXrCVv5EU25iX6inEPbV+VakJfLealkBtJN69AQmh1eGOdCl9Sm1UP6XQ==", @@ -11297,7 +10669,7 @@ "node": ">=18" } }, - "node_modules/gcp-metadata/node_modules/node-fetch": { + "node_modules/gaxios/node_modules/node-fetch": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", @@ -11315,6 +10687,20 @@ "url": "https://opencollective.com/node-fetch" } }, + "node_modules/gcp-metadata": { + "version": "8.1.2", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-8.1.2.tgz", + "integrity": "sha512-zV/5HKTfCeKWnxG0Dmrw51hEWFGfcF2xiXqcA3+J90WDuP0SvoiSO5ORvcBsifmx/FoIjgQN3oNOGaQ5PhLFkg==", + "license": "Apache-2.0", + "dependencies": { + "gaxios": "^7.0.0", + "google-logging-utils": "^1.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/gensync": { "version": "1.0.0-beta.2", "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", @@ -11516,134 +10902,6 @@ "node": ">=18" } }, - "node_modules/google-auth-library/node_modules/gaxios": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-7.1.3.tgz", - "integrity": "sha512-YGGyuEdVIjqxkxVH1pUTMY/XtmmsApXrCVv5EU25iX6inEPbV+VakJfLealkBtJN69AQmh1eGOdCl9Sm1UP6XQ==", - "license": "Apache-2.0", - "dependencies": { - "extend": "^3.0.2", - "https-proxy-agent": "^7.0.1", - "node-fetch": "^3.3.2", - "rimraf": "^5.0.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/google-auth-library/node_modules/node-fetch": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", - "license": "MIT", - "dependencies": { - "data-uri-to-buffer": "^4.0.0", - "fetch-blob": "^3.1.4", - "formdata-polyfill": "^4.0.10" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-fetch" - } - }, - "node_modules/google-gax": { - "version": "4.6.1", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.6.1.tgz", - "integrity": "sha512-V6eky/xz2mcKfAd1Ioxyd6nmA61gao3n01C+YeuIwu3vzM9EDR6wcVzMSIbLMDXWeoi9SHYctXuKYC5uJUT3eQ==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "@grpc/grpc-js": "^1.10.9", - "@grpc/proto-loader": "^0.7.13", - "@types/long": "^4.0.0", - "abort-controller": "^3.0.0", - "duplexify": "^4.0.0", - "google-auth-library": "^9.3.0", - "node-fetch": "^2.7.0", - "object-hash": "^3.0.0", - "proto3-json-serializer": "^2.0.2", - "protobufjs": "^7.3.2", - "retry-request": "^7.0.0", - "uuid": "^9.0.1" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/google-gax/node_modules/gcp-metadata": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.1.tgz", - "integrity": "sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "gaxios": "^6.1.1", - "google-logging-utils": "^0.0.2", - "json-bigint": "^1.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/google-gax/node_modules/google-auth-library": { - "version": "9.15.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.1.tgz", - "integrity": "sha512-Jb6Z0+nvECVz+2lzSMt9u98UsoakXxA2HGHMCxh+so3n90XgYWkq5dur19JAJV7ONiJY22yBTyJB1TSkvPq9Ng==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "base64-js": "^1.3.0", - "ecdsa-sig-formatter": "^1.0.11", - "gaxios": "^6.1.1", - "gcp-metadata": "^6.1.0", - "gtoken": "^7.0.0", - "jws": "^4.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/google-gax/node_modules/google-logging-utils": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-0.0.2.tgz", - "integrity": "sha512-NEgUnEcBiP5HrPzufUkBzJOD/Sxsco3rLNo1F1TNf7ieU8ryUzBhqba8r756CjLX7rn3fHl6iLEwPYuqpoKgQQ==", - "license": "Apache-2.0", - "optional": true, - "engines": { - "node": ">=14" - } - }, - "node_modules/google-gax/node_modules/gtoken": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", - "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", - "license": "MIT", - "optional": true, - "dependencies": { - "gaxios": "^6.0.0", - "jws": "^4.0.0" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/google-gax/node_modules/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "optional": true, - "bin": { - "uuid": "dist/bin/uuid" - } - }, "node_modules/google-logging-utils": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-1.1.3.tgz", @@ -11715,39 +10973,6 @@ "node": ">=18" } }, - "node_modules/gtoken/node_modules/gaxios": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-7.1.3.tgz", - "integrity": "sha512-YGGyuEdVIjqxkxVH1pUTMY/XtmmsApXrCVv5EU25iX6inEPbV+VakJfLealkBtJN69AQmh1eGOdCl9Sm1UP6XQ==", - "license": "Apache-2.0", - "dependencies": { - "extend": "^3.0.2", - "https-proxy-agent": "^7.0.1", - "node-fetch": "^3.3.2", - "rimraf": "^5.0.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/gtoken/node_modules/node-fetch": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", - "license": "MIT", - "dependencies": { - "data-uri-to-buffer": "^4.0.0", - "fetch-blob": "^3.1.4", - "formdata-polyfill": "^4.0.10" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-fetch" - } - }, "node_modules/handlebars": { "version": "4.7.8", "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", @@ -11859,23 +11084,6 @@ "node": ">=14" } }, - "node_modules/html-entities": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.6.0.tgz", - "integrity": "sha512-kig+rMn/QOVRvr7c86gQ8lWXq+Hkv6CbAH1hLu+RG338StTpE8Z0b44SDVaqVu7HGKf27frdmUYEs9hTUX/cLQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/mdevils" - }, - { - "type": "patreon", - "url": "https://patreon.com/mdevils" - } - ], - "license": "MIT", - "optional": true - }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -11969,12 +11177,6 @@ "url": "https://opencollective.com/express" } }, - "node_modules/http-parser-js": { - "version": "0.5.10", - "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.10.tgz", - "integrity": "sha512-Pysuw9XpUq5dVc/2SMHpuTY01RFl8fttgcyunjL7eEMhGM3cI4eOmiCycJDVCo/7O7ClfQD3SaI6ftDzqOXYMA==", - "license": "MIT" - }, "node_modules/http-proxy-agent": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", @@ -12037,9 +11239,9 @@ } }, "node_modules/iconv-lite": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz", - "integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.1.tgz", + "integrity": "sha512-2Tth85cXwGFHfvRgZWszZSvdo+0Xsqmw8k8ZwxScfcBneNUraK+dxRxRm24nszx80Y0TVio8kKLt5sLE7ZCLlw==", "license": "MIT", "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" @@ -12251,7 +11453,6 @@ "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.8.2.tgz", "integrity": "sha512-C6uC+kleiIMmjViJINWk80sOQw5lEzse1ZmvD+S/s8p8CWapftSaC+kocGTx6xrbrJ4WmYQGC08ffHLr6ToR6Q==", "license": "MIT", - "peer": true, "dependencies": { "@ioredis/commands": "1.4.0", "cluster-key-slot": "^1.1.0", @@ -12465,6 +11666,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "devOptional": true, "license": "MIT", "engines": { "node": ">=8" @@ -12654,7 +11856,6 @@ "integrity": "sha512-F26gjC0yWN8uAA5m5Ss8ZQf5nDHWGlN/xWZIh8S5SRbsEKBovwZhxGd6LJlbZYxBgCYOtreSUyb8hpXyGC5O4A==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@jest/core": "30.2.0", "@jest/types": "30.2.0", @@ -13250,15 +12451,6 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "node_modules/jose": { - "version": "4.15.9", - "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.9.tgz", - "integrity": "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/panva" - } - }, "node_modules/js-beautify": { "version": "1.15.4", "resolved": "https://registry.npmjs.org/js-beautify/-/js-beautify-1.15.4.tgz", @@ -13542,68 +12734,6 @@ "safe-buffer": "^5.0.1" } }, - "node_modules/jwks-rsa": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/jwks-rsa/-/jwks-rsa-3.2.0.tgz", - "integrity": "sha512-PwchfHcQK/5PSydeKCs1ylNym0w/SSv8a62DgHJ//7x2ZclCoinlsjAfDxAAbpoTPybOum/Jgy+vkvMmKz89Ww==", - "license": "MIT", - "dependencies": { - "@types/express": "^4.17.20", - "@types/jsonwebtoken": "^9.0.4", - "debug": "^4.3.4", - "jose": "^4.15.4", - "limiter": "^1.1.5", - "lru-memoizer": "^2.2.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/jwks-rsa/node_modules/@types/express": { - "version": "4.17.25", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.25.tgz", - "integrity": "sha512-dVd04UKsfpINUnK0yBoYHDF3xu7xVH4BuDotC/xGuycx4CgbP48X/KF/586bcObxT0HENHXEU8Nqtu6NR+eKhw==", - "license": "MIT", - "dependencies": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.33", - "@types/qs": "*", - "@types/serve-static": "^1" - } - }, - "node_modules/jwks-rsa/node_modules/@types/express-serve-static-core": { - "version": "4.19.7", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.7.tgz", - "integrity": "sha512-FvPtiIf1LfhzsaIXhv/PHan/2FeQBbtBDtfX2QfvPxdUelMDEckK08SM6nqo1MIZY3RUlfA+HV8+hFUSio78qg==", - "license": "MIT", - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*", - "@types/send": "*" - } - }, - "node_modules/jwks-rsa/node_modules/@types/send": { - "version": "0.17.6", - "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.6.tgz", - "integrity": "sha512-Uqt8rPBE8SY0RK8JB1EzVOIZ32uqy8HwdxCnoCOsYrvnswqmFZ/k+9Ikidlk/ImhsdvBsloHbAlewb2IEBV/Og==", - "license": "MIT", - "dependencies": { - "@types/mime": "^1", - "@types/node": "*" - } - }, - "node_modules/jwks-rsa/node_modules/@types/serve-static": { - "version": "1.15.10", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.10.tgz", - "integrity": "sha512-tRs1dB+g8Itk72rlSI2ZrW6vZg0YrLI81iQSTkMmOqnqCaNr/8Ek4VwWcN5vZgCYWbg/JJSGBlUaYGAOP73qBw==", - "license": "MIT", - "dependencies": { - "@types/http-errors": "*", - "@types/node": "*", - "@types/send": "<1" - } - }, "node_modules/jws": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz", @@ -13713,11 +12843,6 @@ "license": "MIT", "optional": true }, - "node_modules/limiter": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/limiter/-/limiter-1.1.5.tgz", - "integrity": "sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA==" - }, "node_modules/lines-and-columns": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", @@ -13935,12 +13060,6 @@ "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", "license": "MIT" }, - "node_modules/lodash.clonedeep": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", - "integrity": "sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==", - "license": "MIT" - }, "node_modules/lodash.compact": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/lodash.compact/-/lodash.compact-3.0.1.tgz", @@ -14129,13 +13248,6 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/long": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", - "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==", - "license": "Apache-2.0", - "optional": true - }, "node_modules/lower-case": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-1.1.4.tgz", @@ -14153,34 +13265,6 @@ "yallist": "^3.0.2" } }, - "node_modules/lru-memoizer": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/lru-memoizer/-/lru-memoizer-2.3.0.tgz", - "integrity": "sha512-GXn7gyHAMhO13WSKrIiNfztwxodVsP8IoZ3XfrJV4yH2x0/OeTO/FIaAHTY5YekdGgW94njfuKmyyt1E0mR6Ug==", - "license": "MIT", - "dependencies": { - "lodash.clonedeep": "^4.5.0", - "lru-cache": "6.0.0" - } - }, - "node_modules/lru-memoizer/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/lru-memoizer/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, "node_modules/luxon": { "version": "3.7.2", "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.7.2.tgz", @@ -14200,32 +13284,39 @@ } }, "node_modules/mailparser": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/mailparser/-/mailparser-3.9.0.tgz", - "integrity": "sha512-jpaNLhDjwy0w2f8sySOSRiWREjPqssSc0C2czV98btCXCRX3EyNloQ2IWirmMDj1Ies8Fkm0l96bZBZpDG7qkg==", + "version": "3.9.1", + "resolved": "https://registry.npmjs.org/mailparser/-/mailparser-3.9.1.tgz", + "integrity": "sha512-6vHZcco3fWsDMkf4Vz9iAfxvwrKNGbHx0dV1RKVphQ/zaNY34Buc7D37LSa09jeSeybWzYcTPjhiZFxzVRJedA==", "license": "MIT", "optional": true, "dependencies": { - "@zone-eu/mailsplit": "5.4.7", + "@zone-eu/mailsplit": "5.4.8", "encoding-japanese": "2.2.0", "he": "1.2.0", "html-to-text": "9.0.5", "iconv-lite": "0.7.0", "libmime": "5.3.7", "linkify-it": "5.0.0", - "nodemailer": "7.0.10", + "nodemailer": "7.0.11", "punycode.js": "2.3.1", "tlds": "1.261.0" } }, - "node_modules/mailparser/node_modules/nodemailer": { - "version": "7.0.10", - "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.10.tgz", - "integrity": "sha512-Us/Se1WtT0ylXgNFfyFSx4LElllVLJXQjWi2Xz17xWw7amDKO2MLtFnVp1WACy7GkVGs+oBlRopVNUzlrGSw1w==", - "license": "MIT-0", + "node_modules/mailparser/node_modules/iconv-lite": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz", + "integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==", + "license": "MIT", "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, "engines": { - "node": ">=6.0.0" + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/make-dir": { @@ -14362,16 +13453,16 @@ } }, "node_modules/mime": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", - "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "devOptional": true, "license": "MIT", - "optional": true, "bin": { "mime": "cli.js" }, "engines": { - "node": ">=10.0.0" + "node": ">=4.0.0" } }, "node_modules/mime-db": { @@ -15022,6 +14113,15 @@ "whatwg-url": "^14.1.0 || ^13.0.0" } }, + "node_modules/mongodb-connection-string-url/node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/mongodb-connection-string-url/node_modules/tr46": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz", @@ -15057,11 +14157,10 @@ } }, "node_modules/mongoose": { - "version": "8.20.2", - "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-8.20.2.tgz", - "integrity": "sha512-U0TPupnqBOAI3p9H9qdShX8/nJUBylliRcHFKuhbewEkM7Y0qc9BbrQR9h4q6+1easoZqej7cq2Ee36AZ0gMzQ==", + "version": "8.20.3", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-8.20.3.tgz", + "integrity": "sha512-AQk63Ry4YM/lWJRt/D5P7UiRjKT+z+vD0NkNKgeQ35TioBC7kuI6wBzhu6/kyrNXg+WotFidW1icEWLNC1rUfg==", "license": "MIT", - "peer": true, "dependencies": { "bson": "^6.10.4", "kareem": "2.6.3", @@ -15085,6 +14184,7 @@ "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", "license": "MIT", "optional": true, + "peer": true, "dependencies": { "debug": "4" }, @@ -15098,6 +14198,7 @@ "integrity": "sha512-95hVgBRgEIRQQQHIbnxBXeHbW4TqFk4ZDJW7wmVtvYar72FdhRIo1UGOLS2eRAKCPEdPBWu+M7+A33D9CdX9rA==", "license": "Apache-2.0", "optional": true, + "peer": true, "dependencies": { "extend": "^3.0.2", "https-proxy-agent": "^5.0.0", @@ -15129,6 +14230,7 @@ "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", "license": "MIT", "optional": true, + "peer": true, "dependencies": { "agent-base": "6", "debug": "4" @@ -15406,7 +14508,6 @@ "resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz", "integrity": "sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==", "license": "MIT", - "peer": true, "engines": { "node": ">=16" } @@ -15513,9 +14614,10 @@ } }, "node_modules/node-forge": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.3.tgz", - "integrity": "sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "dev": true, "license": "(BSD-3-Clause OR GPL-2.0)", "engines": { "node": ">= 6.13.0" @@ -15566,7 +14668,6 @@ "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.11.tgz", "integrity": "sha512-gnXhNRE0FNhD7wPSCGhdNh46Hs6nm+uTyg+Kq0cZukNQiYdnCsoQjodNP9BQVG9XrcK/v6/MgpAPBUFyzh9pvw==", "license": "MIT-0", - "peer": true, "engines": { "node": ">=6.0.0" } @@ -15835,7 +14936,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "devOptional": true, + "dev": true, "license": "MIT", "dependencies": { "yocto-queue": "^0.1.0" @@ -16014,7 +15115,6 @@ "resolved": "https://registry.npmjs.org/passport/-/passport-0.7.0.tgz", "integrity": "sha512-cPLl+qZpSc+ireUvt+IzqbED1cHHkDoVYMo30jbJIdOOjQ1MQYZBPiNvmi8UM6lJuOpTPXJGZQk0DtC4y61MYQ==", "license": "MIT", - "peer": true, "dependencies": { "passport-strategy": "1.x.x", "pause": "0.0.1", @@ -16209,7 +15309,6 @@ "resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz", "integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==", "license": "MIT", - "peer": true, "dependencies": { "pg-connection-string": "^2.9.1", "pg-pool": "^3.10.1", @@ -16477,7 +15576,6 @@ "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.7.4.tgz", "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", "license": "MIT", - "peer": true, "bin": { "prettier": "bin/prettier.cjs" }, @@ -16597,9 +15695,38 @@ "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", "license": "MIT", - "optional": true, - "dependencies": { - "asap": "~2.0.3" + "optional": true, + "dependencies": { + "asap": "~2.0.3" + } + }, + "node_modules/promise-limit": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/promise-limit/-/promise-limit-2.7.0.tgz", + "integrity": "sha512-7nJ6v5lnJsXwGprnGXga4wx6d1POjvi5Qmf1ivTRxTjH4Z/9Czja/UCMLVmB9N93GeWOU93XaFaEt6jbuoagNw==", + "license": "ISC" + }, + "node_modules/promise-retry": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", + "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + "license": "MIT", + "dependencies": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/properties-file": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/properties-file/-/properties-file-3.6.1.tgz", + "integrity": "sha512-9NUyJcxSqdWcJGRpPq6rT7exQbSQMPs0sK6KTvCJsLrTQRwq+hmt/wIB32ugNZmvEuSPyFO+y4nLK3vX34i5Wg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" } }, "node_modules/proto-list": { @@ -16609,44 +15736,6 @@ "license": "ISC", "optional": true }, - "node_modules/proto3-json-serializer": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-2.0.2.tgz", - "integrity": "sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "protobufjs": "^7.2.5" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/protobufjs": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.4.tgz", - "integrity": "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==", - "hasInstallScript": true, - "license": "BSD-3-Clause", - "optional": true, - "dependencies": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/node": ">=13.7.0", - "long": "^5.0.0" - }, - "engines": { - "node": ">=12.0.0" - } - }, "node_modules/proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -16803,13 +15892,10 @@ "optional": true }, "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "license": "MIT", - "engines": { - "node": ">=6" - } + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", + "license": "MIT" }, "node_modules/punycode.js": { "version": "2.3.1", @@ -17108,30 +16194,14 @@ } }, "node_modules/retry": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", - "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", "license": "MIT", - "optional": true, "engines": { "node": ">= 4" } }, - "node_modules/retry-request": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz", - "integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==", - "license": "MIT", - "optional": true, - "dependencies": { - "@types/request": "^2.48.8", - "extend": "^3.0.2", - "teeny-request": "^9.0.0" - }, - "engines": { - "node": ">=14" - } - }, "node_modules/rfdc": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", @@ -17335,7 +16405,6 @@ "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", "license": "Apache-2.0", - "peer": true, "dependencies": { "tslib": "^2.1.0" } @@ -17433,25 +16502,29 @@ } }, "node_modules/send": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", - "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.1.tgz", + "integrity": "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==", "license": "MIT", "dependencies": { - "debug": "^4.3.5", + "debug": "^4.4.3", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", - "http-errors": "^2.0.0", - "mime-types": "^3.0.1", + "http-errors": "^2.0.1", + "mime-types": "^3.0.2", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", - "statuses": "^2.0.1" + "statuses": "^2.0.2" }, "engines": { "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/serialize-javascript": { @@ -17465,9 +16538,9 @@ } }, "node_modules/serve-static": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", - "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.1.tgz", + "integrity": "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==", "license": "MIT", "dependencies": { "encodeurl": "^2.0.0", @@ -17477,6 +16550,10 @@ }, "engines": { "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/set-function-length": { @@ -17736,6 +16813,16 @@ "node": "*" } }, + "node_modules/slugify": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.6.tgz", + "integrity": "sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/socket.io": { "version": "4.8.1", "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.1.tgz", @@ -17871,6 +16958,82 @@ "node": ">= 0.6" } }, + "node_modules/sonarqube-scanner": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/sonarqube-scanner/-/sonarqube-scanner-4.3.2.tgz", + "integrity": "sha512-QI3t+yahqprjh8SWBMwQOEKLzrh35p5MQGyoIS8xm3wR2Q/CaQQeK4TEWpxGsLh2mpn1L1jNSHehSYuWdCpcvw==", + "dev": true, + "dependencies": { + "adm-zip": "0.5.16", + "axios": "1.12.2", + "commander": "13.1.0", + "fs-extra": "11.3.2", + "hpagent": "1.2.0", + "node-forge": "1.3.1", + "properties-file": "3.6.1", + "proxy-from-env": "1.1.0", + "semver": "7.7.2", + "slugify": "1.6.6", + "tar-stream": "3.1.7" + }, + "bin": { + "sonar": "bin/sonar-scanner.js", + "sonar-scanner": "bin/sonar-scanner.js" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/sonarqube-scanner/node_modules/axios": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", + "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/sonarqube-scanner/node_modules/commander": { + "version": "13.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-13.1.0.tgz", + "integrity": "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/sonarqube-scanner/node_modules/fs-extra": { + "version": "11.3.2", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz", + "integrity": "sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/sonarqube-scanner/node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/source-map": { "version": "0.7.4", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", @@ -18009,23 +17172,6 @@ "node": ">= 0.8" } }, - "node_modules/stream-events": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", - "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", - "license": "MIT", - "optional": true, - "dependencies": { - "stubs": "^3.0.0" - } - }, - "node_modules/stream-shift": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", - "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==", - "license": "MIT", - "optional": true - }, "node_modules/streamsearch": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", @@ -18034,6 +17180,18 @@ "node": ">=10.0.0" } }, + "node_modules/streamx": { + "version": "2.23.0", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.23.0.tgz", + "integrity": "sha512-kn+e44esVfn2Fa/O0CPFcex27fjIL6MkVae0Mm6q+E6f0hWv578YCERbv+4m02cjxvDsPKLnmxral/rR6lBMAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "events-universal": "^1.0.0", + "fast-fifo": "^1.3.2", + "text-decoder": "^1.1.0" + } + }, "node_modules/string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", @@ -18260,9 +17418,9 @@ } }, "node_modules/strnum": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/strnum/-/strnum-2.1.1.tgz", - "integrity": "sha512-7ZvoFTiCnGxBtDqJ//Cu6fWtZtc7Y3x+QOirG15wztbdngGSkht27o2pyGWrVy0b4WAy3jbKmnoK6g5VlVNUUw==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-2.1.2.tgz", + "integrity": "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ==", "funding": [ { "type": "github", @@ -18287,13 +17445,6 @@ "url": "https://github.com/sponsors/Borewit" } }, - "node_modules/stubs": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", - "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==", - "license": "MIT", - "optional": true - }, "node_modules/superagent": { "version": "10.2.3", "resolved": "https://registry.npmjs.org/superagent/-/superagent-10.2.3.tgz", @@ -18315,19 +17466,6 @@ "node": ">=14.18.0" } }, - "node_modules/superagent/node_modules/mime": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", - "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", - "dev": true, - "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4.0.0" - } - }, "node_modules/supertest": { "version": "7.1.4", "resolved": "https://registry.npmjs.org/supertest/-/supertest-7.1.4.tgz", @@ -18444,77 +17582,16 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/teeny-request": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", - "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", - "license": "Apache-2.0", - "optional": true, - "dependencies": { - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.9", - "stream-events": "^1.0.5", - "uuid": "^9.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/teeny-request/node_modules/agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "license": "MIT", - "optional": true, - "dependencies": { - "debug": "4" - }, - "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/teeny-request/node_modules/http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "license": "MIT", - "optional": true, - "dependencies": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/teeny-request/node_modules/https-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", - "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "node_modules/tar-stream": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", + "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "dev": true, "license": "MIT", - "optional": true, "dependencies": { - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/teeny-request/node_modules/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "optional": true, - "bin": { - "uuid": "dist/bin/uuid" + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" } }, "node_modules/terser": { @@ -18537,9 +17614,9 @@ } }, "node_modules/terser-webpack-plugin": { - "version": "5.3.15", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.15.tgz", - "integrity": "sha512-PGkOdpRFK+rb1TzVz+msVhw4YMRT9txLF4kRqvJhGhCM324xuR3REBSHALN+l+sAhKUmz0aotnjp5D+P83mLhQ==", + "version": "5.3.16", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.16.tgz", + "integrity": "sha512-h9oBFCWrq78NyWWVcSwZarJkZ01c2AyGrzs1crmHZO3QUg9D61Wu4NPjBy69n7JqylFF5y+CsUZYmYEIZ3mR+Q==", "dev": true, "license": "MIT", "dependencies": { @@ -18577,7 +17654,6 @@ "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", @@ -18722,6 +17798,16 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/text-decoder": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.3.tgz", + "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "b4a": "^1.6.4" + } + }, "node_modules/through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", @@ -18948,7 +18034,6 @@ "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", "devOptional": true, "license": "MIT", - "peer": true, "dependencies": { "@cspotcode/source-map-support": "^0.8.0", "@tsconfig/node10": "^1.0.7", @@ -19058,6 +18143,23 @@ "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", "license": "Unlicense" }, + "node_modules/twemoji-parser": { + "version": "11.0.2", + "resolved": "https://registry.npmjs.org/twemoji-parser/-/twemoji-parser-11.0.2.tgz", + "integrity": "sha512-5kO2XCcpAql6zjdLwRwJjYvAZyDy3+Uj7v1ipBzLthQmDL7Ce19bEqHr3ImSNeoSW2OA8u02XmARbXHaNO8GhA==", + "license": "MIT" + }, + "node_modules/twitter-text": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/twitter-text/-/twitter-text-3.1.0.tgz", + "integrity": "sha512-nulfUi3FN6z0LUjYipJid+eiwXvOLb8Ass7Jy/6zsXmZK3URte043m8fL3FyDzrK+WLpyqhHuR/TcARTN/iuGQ==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "core-js": "^2.5.0", + "punycode": "1.4.1", + "twemoji-parser": "^11.0.2" + } + }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -19132,7 +18234,6 @@ "resolved": "https://registry.npmjs.org/typeorm/-/typeorm-0.3.28.tgz", "integrity": "sha512-6GH7wXhtfq2D33ZuRXYwIsl/qM5685WZcODZb7noOOcRMteM9KF2x2ap3H0EBjnSV0VO4gNAfJT5Ukp0PkOlvg==", "license": "MIT", - "peer": true, "dependencies": { "@sqltools/formatter": "^1.2.5", "ansis": "^4.2.0", @@ -19344,7 +18445,6 @@ "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "devOptional": true, "license": "Apache-2.0", - "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -19354,16 +18454,16 @@ } }, "node_modules/typescript-eslint": { - "version": "8.49.0", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.49.0.tgz", - "integrity": "sha512-zRSVH1WXD0uXczCXw+nsdjGPUdx4dfrs5VQoHnUWmv1U3oNlAKv4FUNdLDhVUg+gYn+a5hUESqch//Rv5wVhrg==", + "version": "8.50.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.50.0.tgz", + "integrity": "sha512-Q1/6yNUmCpH94fbgMUMg2/BSAr/6U7GBk61kZTv1/asghQOWOjTlp9K8mixS5NcJmm2creY+UFfGeW/+OcA64A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.49.0", - "@typescript-eslint/parser": "8.49.0", - "@typescript-eslint/typescript-estree": "8.49.0", - "@typescript-eslint/utils": "8.49.0" + "@typescript-eslint/eslint-plugin": "8.50.0", + "@typescript-eslint/parser": "8.50.0", + "@typescript-eslint/typescript-estree": "8.50.0", + "@typescript-eslint/utils": "8.50.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -19553,6 +18653,16 @@ "punycode": "^2.1.0" } }, + "node_modules/uri-js/node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/url-parse": { "version": "1.5.10", "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", @@ -19802,19 +18912,6 @@ "url": "https://github.com/fb55/htmlparser2?sponsor=1" } }, - "node_modules/web-resource-inliner/node_modules/mime": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", - "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", - "license": "MIT", - "optional": true, - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4.0.0" - } - }, "node_modules/web-streams-polyfill": { "version": "4.0.0-beta.3", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", @@ -19836,7 +18933,6 @@ "integrity": "sha512-HU1JOuV1OavsZ+mfigY0j8d1TgQgbZ6M+J75zDkpEAwYeXjWSqrGJtgnPblJjd/mAyTNQ7ygw0MiKOn6etz8yw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@types/eslint-scope": "^3.7.7", "@types/estree": "^1.0.8", @@ -19906,7 +19002,6 @@ "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", @@ -20023,29 +19118,6 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/websocket-driver": { - "version": "0.7.4", - "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", - "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", - "license": "Apache-2.0", - "dependencies": { - "http-parser-js": ">=0.5.1", - "safe-buffer": ">=5.1.0", - "websocket-extensions": ">=0.1.1" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/websocket-extensions": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", - "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==", - "license": "Apache-2.0", - "engines": { - "node": ">=0.8.0" - } - }, "node_modules/whatwg-url": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", @@ -20369,7 +19441,6 @@ "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", "license": "MIT", - "peer": true, "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", @@ -20406,7 +19477,7 @@ "version": "0.1.0", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "devOptional": true, + "dev": true, "license": "MIT", "engines": { "node": ">=10" diff --git a/package.json b/package.json index f26998c1..5dac6444 100644 --- a/package.json +++ b/package.json @@ -30,7 +30,8 @@ "seed": "ts-node -r tsconfig-paths/register src/databases/seeds/scripts/seed.ts", "es:seed": "ts-node -r tsconfig-paths/register src/elasticsearch/scripts/es-seed.ts", "es:reset": "ts-node -r tsconfig-paths/register src/elasticsearch/scripts/es-reset.ts", - "generate-encryption-key": "node -r ts-node/register src/shared/services/encryption/generate-encryption-key.ts" + "generate-encryption-key": "node -r ts-node/register src/shared/services/encryption/generate-encryption-key.ts", + "sonar": "npm run test:cov && sonar-scanner" }, "lint-staged": { "*.ts": [ @@ -55,8 +56,8 @@ "@nestjs/mongoose": "^11.0.3", "@nestjs/passport": "^11.0.5", "@nestjs/platform-express": "^11.0.1", - "@nestjs/schedule": "^6.0.1", "@nestjs/platform-socket.io": "^11.1.9", + "@nestjs/schedule": "^6.0.1", "@nestjs/swagger": "^11.2.0", "@nestjs/typeorm": "^11.0.0", "@nestjs/websockets": "^11.1.9", @@ -70,7 +71,7 @@ "class-transformer": "^0.5.1", "class-validator": "^0.14.2", "cookie-parser": "^1.4.7", - "firebase-admin": "^13.6.0", + "expo-server-sdk": "^4.0.0", "fluent-ffmpeg": "^2.1.3", "google-auth-library": "^10.4.1", "groq-sdk": "^0.37.0", @@ -95,6 +96,7 @@ "socket.io": "^4.8.1", "swagger-ui-express": "^5.0.1", "tunnel-ssh": "^5.2.0", + "twitter-text": "^3.1.0", "typeorm": "^0.3.26", "xlsx": "^0.18.5" }, @@ -115,6 +117,7 @@ "@types/passport-github2": "^1.2.9", "@types/supertest": "^6.0.2", "@types/tunnel-ssh": "^5.0.4", + "@types/twitter-text": "^3.1.10", "eslint": "^9.18.0", "eslint-config-prettier": "^10.0.1", "eslint-plugin-prettier": "^5.2.2", @@ -123,6 +126,7 @@ "jest": "^30.0.0", "lint-staged": "^16.2.4", "prettier": "^3.4.2", + "sonarqube-scanner": "^4.3.2", "source-map-support": "^0.5.21", "supertest": "^7.0.0", "ts-jest": "^29.2.5", @@ -164,6 +168,10 @@ "!**/enums/**", "!**/migrations/**", "!**/seeds/**", + "!**/*.module.ts", + "!**/*.config.ts", + "!**/config/**", + "!**/constants/**", "!main.ts", "!**/*.spec.ts", "!**/*-key.ts", @@ -175,4 +183,4 @@ "^src/(.*)$": "/$1" } } -} +} \ No newline at end of file diff --git a/simple-socket-test.html b/simple-socket-test.html index 08038601..defdd2fa 100644 --- a/simple-socket-test.html +++ b/simple-socket-test.html @@ -489,6 +489,7 @@

Event Logs

const message = { content: content, message_type: messageType, + image_url: "https://yapperdev.blob.core.windows.net/profile-images/test-team-1765575149782-standard.jpg", }; if (messageType === "reply" && replyToId) { diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 00000000..3fe32ee5 --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,27 @@ +# SonarQube Configuration +sonar.projectKey=x-backend-replica +sonar.projectName=X Backend Replica +sonar.projectVersion=1.0 + +# Source code location +sonar.sources=src +sonar.tests=src +sonar.test.inclusions=**/*.spec.ts + +# Exclude files from analysis +sonar.exclusions=**/node_modules/**,**/dist/**,**/coverage/**,**/*.spec.ts,**/migrations/**,**/seeds/**,**/databases/**,**/*.swagger.ts + +# Exclude infrastructure code from coverage (DTOs, Entities, Modules, Configs) +sonar.coverage.exclusions=**/*.dto.ts,**/*.entity.ts,**/*.module.ts,**/config/**,**/migrations/**,**/seeds/**,**/databases/**,**/*.config.ts,**/constants/**,**/*.interface.ts,**/*.enum.ts + +# TypeScript specific settings +sonar.typescript.lcov.reportPaths=coverage/lcov.info + +# Encoding +sonar.sourceEncoding=UTF-8 + +# SonarQube server URL (default local) +sonar.host.url=http://localhost:9000 + +# Authentication (you'll need to generate a token after SonarQube starts) +sonar.login=squ_3ee91cb3e490cdd73f98c3640cd764b17b18b912 diff --git a/src/app.module.ts b/src/app.module.ts index 66e7f740..51d6cddf 100644 --- a/src/app.module.ts +++ b/src/app.module.ts @@ -27,7 +27,7 @@ import { Tweet } from './tweets/entities/tweet.entity'; import { UserFollows } from './user/entities/user-follows.entity'; import { TweetLike } from './tweets/entities/tweet-like.entity'; import { TweetReply } from './tweets/entities/tweet-reply.entity'; -import { FcmModule } from './fcm/fcm.module'; +import { FcmModule } from './expo/expo.module'; import { TrendModule } from './trend/trend.module'; import { ScheduleModule } from '@nestjs/schedule'; diff --git a/src/app.service.ts b/src/app.service.ts index 6bf65c1e..e1e89977 100644 --- a/src/app.service.ts +++ b/src/app.service.ts @@ -189,7 +189,6 @@ export class AppService { let replies_count = 0; for (const reply_data of TestDataConstants.TEST_REPLIES) { const replier = created_users[reply_data.replier_index]; - const original_user = created_users[reply_data.original_user_index]; const original_tweet = all_tweets[reply_data.original_user_index][reply_data.original_tweet_index]; diff --git a/src/auth/auth.controller.ts b/src/auth/auth.controller.ts index ce0ce091..598d7547 100644 --- a/src/auth/auth.controller.ts +++ b/src/auth/auth.controller.ts @@ -406,7 +406,8 @@ export class AuthController { @ApiResponse(google_oauth_swagger.responses.success) @ApiResponse(google_oauth_swagger.responses.InternalServerError) @Get('google') - googleLogin() {} + // eslint-disable-next-line @typescript-eslint/no-empty-function + googleLogin() {} // Intentionally empty - GoogleAuthGuard handles the OAuth redirect @ApiOperation(google_mobile_swagger.operation) @ApiBody({ type: MobileGoogleAuthDto }) @@ -505,7 +506,8 @@ export class AuthController { @ApiResponse(facebook_oauth_swagger.responses.success) @ApiResponse(facebook_oauth_swagger.responses.InternalServerError) @Get('facebook') - facebookLogin() {} + // eslint-disable-next-line @typescript-eslint/no-empty-function + facebookLogin() {} // Intentionally empty - FacebookAuthGuard handles the OAuth redirect @UseGuards(FacebookAuthGuard) @ApiOperation(facebook_callback_swagger.operation) @@ -561,7 +563,8 @@ export class AuthController { @ApiResponse(github_oauth_swagger.responses.success) @ApiResponse(github_oauth_swagger.responses.InternalServerError) @Get('github') - async githubLogin() {} + // eslint-disable-next-line @typescript-eslint/no-empty-function + async githubLogin() {} // Intentionally empty - GitHubAuthGuard handles the OAuth redirect @ApiOperation(github_mobile_swagger.operation) @ApiBody({ type: MobileGitHubAuthDto }) diff --git a/src/auth/auth.service.ts b/src/auth/auth.service.ts index 1e2cc7eb..753a0fcf 100644 --- a/src/auth/auth.service.ts +++ b/src/auth/auth.service.ts @@ -137,11 +137,11 @@ export class AuthService { const { name, birth_date, email, captcha_token } = dto; // Verify CAPTCHA first - // try { - // await this.captcha_service.validateCaptcha(captcha_token); - // } catch (error) { - // throw new BadRequestException(ERROR_MESSAGES.CAPTCHA_VERIFICATION_FAILED); - // } + try { + await this.captcha_service.validateCaptcha(captcha_token); + } catch (error) { + throw new BadRequestException(ERROR_MESSAGES.CAPTCHA_VERIFICATION_FAILED); + } const existing_user = await this.user_repository.findByEmail(email); if (existing_user) { @@ -343,7 +343,7 @@ export class AuthService { } async sendResetPasswordEmail(identifier: string) { - const { identifier_type, user_id } = await this.checkIdentifier(identifier); + const { user_id } = await this.checkIdentifier(identifier); const user = await this.user_repository.findById(user_id); if (!user) { throw new NotFoundException(ERROR_MESSAGES.USER_NOT_FOUND); @@ -371,7 +371,7 @@ export class AuthService { } async verifyResetPasswordOtp(identifier: string, token: string) { - const { identifier_type, user_id } = await this.checkIdentifier(identifier); + const { user_id } = await this.checkIdentifier(identifier); const is_valid = await this.verification_service.validateOtp(user_id, token, 'password'); if (!is_valid) { @@ -412,7 +412,7 @@ export class AuthService { } async resetPassword(identifier: string, new_password: string, token: string) { - const { identifier_type, user_id } = await this.checkIdentifier(identifier); + const { user_id } = await this.checkIdentifier(identifier); const token_data = await this.verification_service.validatePasswordResetToken(token); if (!token_data) { diff --git a/src/auth/guards/jwt.guard.ts b/src/auth/guards/jwt.guard.ts index e0ad8021..0a455fc4 100644 --- a/src/auth/guards/jwt.guard.ts +++ b/src/auth/guards/jwt.guard.ts @@ -1,10 +1,36 @@ import { ExecutionContext, Injectable, UnauthorizedException } from '@nestjs/common'; import { AuthGuard } from '@nestjs/passport'; +import { RedisService } from 'src/redis/redis.service'; @Injectable() export class JwtAuthGuard extends AuthGuard('jwt') { - override canActivate(context: ExecutionContext) { - return super.canActivate(context); + constructor(private readonly redis_service: RedisService) { + super(); + } + + override async canActivate(context: ExecutionContext) { + const can_activate = await super.canActivate(context); + + if (!can_activate) { + return false; + } + + const request = context.switchToHttp().getRequest(); + const user = request.user; + + let is_deleted = false; + if (user) { + try { + is_deleted = await this.redis_service.exists(`deleted_user:${user.id}`); + } catch (error) { + console.warn('Failed to check deleted user in Redis:', error.message); + } + if (is_deleted) { + throw new UnauthorizedException('User account has been deleted'); + } + } + + return true; } override handleRequest(err: any, user: any, info: any) { diff --git a/src/auth/guards/ws-jwt.guard.ts b/src/auth/guards/ws-jwt.guard.ts index 7636e5f2..81d458b6 100644 --- a/src/auth/guards/ws-jwt.guard.ts +++ b/src/auth/guards/ws-jwt.guard.ts @@ -13,7 +13,7 @@ interface IAuthenticatedSocket extends Socket { @Injectable() export class WsJwtGuard implements CanActivate { - constructor(private jwt_service: JwtService) {} + constructor(private readonly jwt_service: JwtService) {} async canActivate(context: ExecutionContext): Promise { try { diff --git a/src/auth/strategies/facebook.strategy.ts b/src/auth/strategies/facebook.strategy.ts index 701d5807..fe161eaa 100644 --- a/src/auth/strategies/facebook.strategy.ts +++ b/src/auth/strategies/facebook.strategy.ts @@ -8,8 +8,8 @@ import { FacebookLoginDTO } from '../dto/facebook-login.dto'; @Injectable() export class FacebookStrategy extends PassportStrategy(Strategy) { constructor( - private config_service: ConfigService, - private auth_service: AuthService + private readonly config_service: ConfigService, + private readonly auth_service: AuthService ) { super({ clientID: config_service.get('FACEBOOK_CLIENT_ID') || '', diff --git a/src/auth/strategies/github.strategy.ts b/src/auth/strategies/github.strategy.ts index 988d31a4..ef7b4c01 100644 --- a/src/auth/strategies/github.strategy.ts +++ b/src/auth/strategies/github.strategy.ts @@ -8,8 +8,8 @@ import { GitHubUserDto } from '../dto/github-user.dto'; @Injectable() export class GitHubStrategy extends PassportStrategy(Strategy, 'github') { constructor( - private config_service: ConfigService, - private auth_service: AuthService + private readonly config_service: ConfigService, + private readonly auth_service: AuthService ) { super({ clientID: config_service.get('GITHUB_CLIENT_ID') || '', diff --git a/src/auth/strategies/google.strategy.ts b/src/auth/strategies/google.strategy.ts index bb1657d3..f671fe60 100644 --- a/src/auth/strategies/google.strategy.ts +++ b/src/auth/strategies/google.strategy.ts @@ -7,8 +7,8 @@ import { AuthService } from '../auth.service'; @Injectable() export class GoogleStrategy extends PassportStrategy(Strategy) { constructor( - private config_service: ConfigService, - private auth_service: AuthService + private readonly config_service: ConfigService, + private readonly auth_service: AuthService ) { super({ clientID: config_service.get('GOOGLE_CLIENT_ID') || '', diff --git a/src/auth/username.service.ts b/src/auth/username.service.ts index 4600a95f..26295a99 100644 --- a/src/auth/username.service.ts +++ b/src/auth/username.service.ts @@ -104,8 +104,8 @@ export class UsernameService { private cleanName(name: string): string { return name - .replace(/[^a-zA-Z0-9]/g, '') // Remove special characters - .replace(/\s+/g, ''); // Remove spaces + .replaceAll(/[^a-zA-Z0-9]/g, '') // Remove special characters + .replaceAll(/\s+/g, ''); // Remove spaces } private truncateToMaxLength(str: string): string { diff --git a/src/azure-storage/azure-storage.service.ts b/src/azure-storage/azure-storage.service.ts index 5208436c..2ebdacef 100644 --- a/src/azure-storage/azure-storage.service.ts +++ b/src/azure-storage/azure-storage.service.ts @@ -8,7 +8,7 @@ export class AzureStorageService implements OnModuleInit { private blob_service_client: BlobServiceClient; private profile_image_container_name: string; - constructor(private configService: ConfigService) {} + constructor(private readonly configService: ConfigService) {} onModuleInit() { const connection_string = this.configService.get('AZURE_STORAGE_CONNECTION_STRING'); diff --git a/src/background-jobs/ai-summary/ai-summary.processor.spec.ts b/src/background-jobs/ai-summary/ai-summary.processor.spec.ts index bb0c499e..a104bd9a 100644 --- a/src/background-jobs/ai-summary/ai-summary.processor.spec.ts +++ b/src/background-jobs/ai-summary/ai-summary.processor.spec.ts @@ -130,7 +130,7 @@ describe('AiSummaryProcessor', () => { mock_tweet_summary_repository.save.mockRejectedValue(new Error('Save Error')); await expect(processor.handleGenerateSummary(mock_job)).rejects.toThrow(); - }); + }, 10000); it('should process job data correctly', async () => { const existing_summary = { diff --git a/src/background-jobs/ai-summary/ai-summary.service.ts b/src/background-jobs/ai-summary/ai-summary.service.ts index b6d92bb6..828a5b77 100644 --- a/src/background-jobs/ai-summary/ai-summary.service.ts +++ b/src/background-jobs/ai-summary/ai-summary.service.ts @@ -1,9 +1,10 @@ -import { Injectable } from '@nestjs/common'; import { InjectQueue } from '@nestjs/bull'; -import type { Queue } from 'bull'; import { BackgroundJobsService } from '../background-jobs'; -import { JOB_DELAYS, JOB_NAMES, JOB_PRIORITIES, QUEUE_NAMES } from '../constants/queue.constants'; -import type { GenerateTweetSummaryDto } from './ai-summary.dto'; +import { QUEUE_NAMES } from '../constants/queue.constants'; +import { Injectable, Logger } from '@nestjs/common'; +import type { Queue } from 'bull'; +import { JOB_DELAYS, JOB_NAMES, JOB_PRIORITIES } from '../constants/queue.constants'; +import { GenerateTweetSummaryDto } from './ai-summary.dto'; @Injectable() export class AiSummaryJobService extends BackgroundJobsService { @@ -19,12 +20,32 @@ export class AiSummaryJobService extends BackgroundJobsService TimelineModule), ], controllers: [ExploreController, EmailJobsController], providers: [ @@ -192,6 +213,11 @@ import { TrendModule } from 'src/trend/trend.module'; AiSummaryProcessor, HashtagJobService, HashtagProcessor, + InitTimelineQueueJobService, + RefillTimelineQueueJobService, + CleanupOldTweetsJobService, + TimelineProcessor, + TimelineCron, ], exports: [ @@ -220,6 +246,9 @@ import { TrendModule } from 'src/trend/trend.module'; EsFollowJobService, CompressVideoJobService, AiSummaryJobService, + InitTimelineQueueJobService, + RefillTimelineQueueJobService, + CleanupOldTweetsJobService, ], }) export class BackgroundJobsModule {} diff --git a/src/background-jobs/background-jobs.ts b/src/background-jobs/background-jobs.ts index a3fa82a3..ce96c338 100644 --- a/src/background-jobs/background-jobs.ts +++ b/src/background-jobs/background-jobs.ts @@ -1,9 +1,8 @@ import { Logger } from '@nestjs/common'; import type { Queue } from 'bull'; -import { JOB_NAMES } from './constants/queue.constants'; export abstract class BackgroundJobsService { - private readonly logger = new Logger(BackgroundJobsService.name); + protected readonly logger = new Logger(BackgroundJobsService.name); constructor( protected readonly queue: Queue, @@ -19,12 +18,7 @@ export abstract class BackgroundJobsService { error_message_prefix: string ) { try { - let job_id: string | undefined = undefined; - if (this.job_name === JOB_NAMES.AI_SUMMARY.GENERATE_TWEET_SUMMARY) { - job_id = `tweet-summary:${dto['tweet_id']}`; - } const job = await this.queue.add(this.job_name, dto, { - jobId: job_id, priority, delay, attempts: 3, diff --git a/src/background-jobs/constants/queue.constants.ts b/src/background-jobs/constants/queue.constants.ts index 0157a6da..0206487a 100644 --- a/src/background-jobs/constants/queue.constants.ts +++ b/src/background-jobs/constants/queue.constants.ts @@ -15,9 +15,6 @@ export const JOB_NAMES = { EMAIL: { SEND_OTP: 'send-otp-email', }, - TIMELINE: { - PREPARE_FEED: 'prepare-user-feed', - }, FEED: { INDEX_TWEET: 'index-tweet-to-elastic', }, @@ -47,6 +44,11 @@ export const JOB_NAMES = { AI_SUMMARY: { GENERATE_TWEET_SUMMARY: 'generate-tweet-summary', }, + TIMELINE: { + INIT_QUEUE: 'init-timeline-queue', + REFILL_QUEUE: 'refill-timeline-queue', + CLEANUP_OLD_TWEETS: 'cleanup-old-tweets', + }, HASHTAG: { UPDATE_HASHTAG: 'update-hashtag', }, @@ -78,10 +80,12 @@ export const EXPLORE_CONFIG = { DEFAULT_SINCE_HOURS: 1, DEFAULT_BATCH_SIZE: 500, MIN_SCORE_THRESHOLD: 0.001, - MAX_CATEGORY_SIZE: 20, + MAX_CATEGORY_SIZE: 50, } as const; export const EXPLORE_CRON_SCHEDULE = '30 * * * *'; // Every hour at minute 30 +export const TREND_CRON_SCHEDULE = '0 * * * *'; // Every hour at minute 0 +export const FAKE_TREND_CRON_SCHEDULE = '*/20 * * * *'; // Every 20 minutes export const EXPLORE_JOB_PRIORITIES = { HIGH: 1, diff --git a/src/background-jobs/elasticsearch/dtos/es-delete-tweets.dto.ts b/src/background-jobs/elasticsearch/dtos/es-delete-tweets.dto.ts new file mode 100644 index 00000000..cf8548ad --- /dev/null +++ b/src/background-jobs/elasticsearch/dtos/es-delete-tweets.dto.ts @@ -0,0 +1,3 @@ +export class EsDeleteTweetsDto { + tweet_ids: string[]; +} diff --git a/src/background-jobs/elasticsearch/es-delete-tweet.service.spec.ts b/src/background-jobs/elasticsearch/es-delete-tweet.service.spec.ts index 6ce996d8..d70834a3 100644 --- a/src/background-jobs/elasticsearch/es-delete-tweet.service.spec.ts +++ b/src/background-jobs/elasticsearch/es-delete-tweet.service.spec.ts @@ -3,7 +3,7 @@ import { EsDeleteTweetJobService } from './es-delete-tweet.service'; import { getQueueToken } from '@nestjs/bull'; import { QUEUE_NAMES } from '../constants/queue.constants'; import type { Queue } from 'bull'; -import { EsSyncTweetDto } from './dtos/es-sync-tweet.dto'; +import { EsDeleteTweetsDto } from './dtos/es-delete-tweets.dto'; describe('EsDeleteTweetJobService', () => { let service: EsDeleteTweetJobService; @@ -37,7 +37,7 @@ describe('EsDeleteTweetJobService', () => { describe('queueDeleteTweet', () => { it('should queue a delete tweet job successfully', async () => { - const dto = { tweet_id: 'tweet-123' }; + const dto = { tweet_ids: ['tweet-123', 'tweet-321'] }; const mock_job = { id: 'job-123', data: dto }; mock_queue.add.mockResolvedValue(mock_job as any); @@ -56,7 +56,7 @@ describe('EsDeleteTweetJobService', () => { }); it('should queue job with custom priority and delay', async () => { - const dto = { tweet_id: 'tweet-123' }; + const dto = { tweet_ids: ['tweet-123', 'tweet-321'] }; const custom_priority = 5; const custom_delay = 1000; const mock_job = { id: 'job-123', data: dto }; @@ -76,7 +76,7 @@ describe('EsDeleteTweetJobService', () => { }); it('should handle queue errors', async () => { - const dto: EsSyncTweetDto = { tweet_id: 'tweet-123' }; + const dto: EsDeleteTweetsDto = { tweet_ids: ['tweet-123', 'tweet-321'] }; const error = new Error('Queue error'); mock_queue.add.mockRejectedValue(error); diff --git a/src/background-jobs/elasticsearch/es-delete-tweet.service.ts b/src/background-jobs/elasticsearch/es-delete-tweet.service.ts index 54e7697c..4a2a25ad 100644 --- a/src/background-jobs/elasticsearch/es-delete-tweet.service.ts +++ b/src/background-jobs/elasticsearch/es-delete-tweet.service.ts @@ -1,13 +1,15 @@ -import { Injectable, Logger } from '@nestjs/common'; +import { Injectable } from '@nestjs/common'; import { InjectQueue } from '@nestjs/bull'; import type { Queue } from 'bull'; import { JOB_DELAYS, JOB_NAMES, JOB_PRIORITIES, QUEUE_NAMES } from '../constants/queue.constants'; import { BackgroundJobsService } from 'src/background-jobs/background-jobs'; -import { EsSyncTweetDto } from './dtos/es-sync-tweet.dto'; +import { EsDeleteTweetsDto } from './dtos/es-delete-tweets.dto'; @Injectable() -export class EsDeleteTweetJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.ELASTICSEARCH) private elasticsearch_queue: Queue) { +export class EsDeleteTweetJobService extends BackgroundJobsService { + constructor( + @InjectQueue(QUEUE_NAMES.ELASTICSEARCH) private readonly elasticsearch_queue: Queue + ) { super( elasticsearch_queue, JOB_NAMES.ELASTICSEARCH.DELETE_TWEET, @@ -16,7 +18,7 @@ export class EsDeleteTweetJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.ELASTICSEARCH) private elasticsearch_queue: Queue) { + constructor( + @InjectQueue(QUEUE_NAMES.ELASTICSEARCH) private readonly elasticsearch_queue: Queue + ) { super( elasticsearch_queue, JOB_NAMES.ELASTICSEARCH.DELETE_USER, diff --git a/src/background-jobs/elasticsearch/es-follow.service.ts b/src/background-jobs/elasticsearch/es-follow.service.ts index 89c74ea6..eca85ec0 100644 --- a/src/background-jobs/elasticsearch/es-follow.service.ts +++ b/src/background-jobs/elasticsearch/es-follow.service.ts @@ -7,7 +7,9 @@ import { EsSyncFollowDto } from './dtos/es-sync-follow.dto'; @Injectable() export class EsFollowJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.ELASTICSEARCH) private elasticsearch_queue: Queue) { + constructor( + @InjectQueue(QUEUE_NAMES.ELASTICSEARCH) private readonly elasticsearch_queue: Queue + ) { super( elasticsearch_queue, JOB_NAMES.ELASTICSEARCH.FOLLOW, diff --git a/src/background-jobs/elasticsearch/es-index-tweet.service.ts b/src/background-jobs/elasticsearch/es-index-tweet.service.ts index 85637644..0ea15b8f 100644 --- a/src/background-jobs/elasticsearch/es-index-tweet.service.ts +++ b/src/background-jobs/elasticsearch/es-index-tweet.service.ts @@ -7,7 +7,9 @@ import { EsSyncTweetDto } from './dtos/es-sync-tweet.dto'; @Injectable() export class EsIndexTweetJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.ELASTICSEARCH) private elasticsearch_queue: Queue) { + constructor( + @InjectQueue(QUEUE_NAMES.ELASTICSEARCH) private readonly elasticsearch_queue: Queue + ) { super( elasticsearch_queue, JOB_NAMES.ELASTICSEARCH.INDEX_TWEET, diff --git a/src/background-jobs/elasticsearch/es-sync.processor.spec.ts b/src/background-jobs/elasticsearch/es-sync.processor.spec.ts index b860d641..e123a90e 100644 --- a/src/background-jobs/elasticsearch/es-sync.processor.spec.ts +++ b/src/background-jobs/elasticsearch/es-sync.processor.spec.ts @@ -29,8 +29,10 @@ describe('EsSyncProcessor', () => { const mock_elasticsearch_service = { index: jest.fn(), delete: jest.fn(), + bulk: jest.fn(), updateByQuery: jest.fn(), deleteByQuery: jest.fn(), + get: jest.fn(), }; const mock_user_follows_repository = { @@ -183,6 +185,7 @@ describe('EsSyncProcessor', () => { const mock_tweet = { tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', content: 'Reply tweet', + type: TweetType.REPLY, user_id: '1a2b3c4d-5e6f-7g8h-9i0j-k1l2m3n4o5p6', user: { name: 'Test User', @@ -212,69 +215,335 @@ describe('EsSyncProcessor', () => { }), }); }); - }); - describe('handleDeleteTweet', () => { - it('should delete a tweet successfully', async () => { + it('should use existing parent_id from ES when not provided in job data', async () => { + const mock_tweet = { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + content: 'Reply tweet', + type: TweetType.REPLY, + user_id: '1a2b3c4d-5e6f-7g8h-9i0j-k1l2m3n4o5p6', + user: { + name: 'Test User', + username: 'testuser', + } as User, + } as Tweet; + const job = { data: { tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + parent_id: undefined, + conversation_id: '4fa1b0f4-a059-4b6f-ab1f-137217d33d3c', }, } as Job; - mock_elasticsearch_service.delete.mockResolvedValue({} as any); + const existing_es_doc = { + _source: { + parent_id: '6ba9c7cf-302b-433f-8642-50de81ef0372', + conversation_id: '4fa1b0f4-a059-4b6f-ab1f-137217d33d3c', + }, + }; - const logger_spy = jest.spyOn(Logger.prototype, 'log'); + mock_tweets_repository.findOne.mockResolvedValue(mock_tweet); + mock_elasticsearch_service.get.mockResolvedValue(existing_es_doc as any); + mock_elasticsearch_service.index.mockResolvedValue({} as any); - await processor.handleDeleteTweet(job); + await processor.handleIndexTweet(job); - expect(mock_elasticsearch_service.delete).toHaveBeenCalledWith({ + expect(mock_elasticsearch_service.get).toHaveBeenCalledWith({ index: ELASTICSEARCH_INDICES.TWEETS, id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', }); + expect(mock_elasticsearch_service.index).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + document: expect.objectContaining({ + parent_id: '6ba9c7cf-302b-433f-8642-50de81ef0372', + conversation_id: '4fa1b0f4-a059-4b6f-ab1f-137217d33d3c', + }), + }); + }); + + it('should use existing conversation_id from ES when not provided in job data', async () => { + const mock_tweet = { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + content: 'Reply tweet', + type: TweetType.REPLY, + user_id: '1a2b3c4d-5e6f-7g8h-9i0j-k1l2m3n4o5p6', + user: { + name: 'Test User', + username: 'testuser', + } as User, + } as Tweet; + + const job = { + data: { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + parent_id: '6ba9c7cf-302b-433f-8642-50de81ef0372', + conversation_id: undefined, + }, + } as Job; + + const existing_es_doc = { + _source: { + parent_id: '6ba9c7cf-302b-433f-8642-50de81ef0372', + conversation_id: '4fa1b0f4-a059-4b6f-ab1f-137217d33d3c', + }, + }; + + mock_tweets_repository.findOne.mockResolvedValue(mock_tweet); + mock_elasticsearch_service.get.mockResolvedValue(existing_es_doc as any); + mock_elasticsearch_service.index.mockResolvedValue({} as any); + + await processor.handleIndexTweet(job); + + expect(mock_elasticsearch_service.get).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + }); + expect(mock_elasticsearch_service.index).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + document: expect.objectContaining({ + parent_id: '6ba9c7cf-302b-433f-8642-50de81ef0372', + conversation_id: '4fa1b0f4-a059-4b6f-ab1f-137217d33d3c', + }), + }); + }); + + it('should use existing parent_id and conversation_id from ES when both not provided', async () => { + const mock_tweet = { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + content: 'Reply tweet', + type: TweetType.REPLY, + user_id: '1a2b3c4d-5e6f-7g8h-9i0j-k1l2m3n4o5p6', + user: { + name: 'Test User', + username: 'testuser', + } as User, + } as Tweet; + + const job = { + data: { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + parent_id: undefined, + conversation_id: undefined, + }, + } as Job; + + const existing_es_doc = { + _source: { + parent_id: '6ba9c7cf-302b-433f-8642-50de81ef0372', + conversation_id: '4fa1b0f4-a059-4b6f-ab1f-137217d33d3c', + }, + }; + + mock_tweets_repository.findOne.mockResolvedValue(mock_tweet); + mock_elasticsearch_service.get.mockResolvedValue(existing_es_doc as any); + mock_elasticsearch_service.index.mockResolvedValue({} as any); + + await processor.handleIndexTweet(job); + + expect(mock_elasticsearch_service.get).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + }); + expect(mock_elasticsearch_service.index).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + document: expect.objectContaining({ + parent_id: '6ba9c7cf-302b-433f-8642-50de81ef0372', + conversation_id: '4fa1b0f4-a059-4b6f-ab1f-137217d33d3c', + }), + }); + }); + + it('should skip ES lookup when tweet type is TWEET even if IDs not provided', async () => { + const mock_tweet = { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + content: 'Regular tweet', + type: TweetType.TWEET, + user_id: '1a2b3c4d-5e6f-7g8h-9i0j-k1l2m3n4o5p6', + user: { + name: 'Test User', + username: 'testuser', + } as User, + } as Tweet; + + const job = { + data: { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + parent_id: undefined, + conversation_id: undefined, + }, + } as Job; + + mock_tweets_repository.findOne.mockResolvedValue(mock_tweet); + mock_elasticsearch_service.index.mockResolvedValue({} as any); + + await processor.handleIndexTweet(job); + + expect(mock_elasticsearch_service.get).not.toHaveBeenCalled(); + expect(mock_elasticsearch_service.index).toHaveBeenCalled(); + }); + + it('should handle ES get error gracefully and continue with indexing', async () => { + const mock_tweet = { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + content: 'Reply tweet', + type: TweetType.REPLY, + user_id: '1a2b3c4d-5e6f-7g8h-9i0j-k1l2m3n4o5p6', + user: { + name: 'Test User', + username: 'testuser', + } as User, + } as Tweet; + + const job = { + data: { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + parent_id: undefined, + conversation_id: undefined, + }, + } as Job; + + mock_tweets_repository.findOne.mockResolvedValue(mock_tweet); + mock_elasticsearch_service.get.mockRejectedValue(new Error('Document not found')); + mock_elasticsearch_service.index.mockResolvedValue({} as any); + + const logger_spy = jest.spyOn(Logger.prototype, 'debug'); + + await processor.handleIndexTweet(job); + expect(logger_spy).toHaveBeenCalledWith( - 'Deleted tweet 0c059899-f706-4c8f-97d7-ba2e9fc22d6d from Elasticsearch' + 'No existing ES document for tweet 0c059899-f706-4c8f-97d7-ba2e9fc22d6d' ); + expect(mock_elasticsearch_service.index).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + document: expect.objectContaining({ + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + }), + }); }); - it('should skip if tweet not found in ES (404)', async () => { + it('should prefer job data IDs over existing ES document IDs', async () => { + const mock_tweet = { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + content: 'Reply tweet', + type: TweetType.REPLY, + user_id: '1a2b3c4d-5e6f-7g8h-9i0j-k1l2m3n4o5p6', + user: { + name: 'Test User', + username: 'testuser', + } as User, + } as Tweet; + const job = { data: { tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + parent_id: 'new-parent-id', + conversation_id: 'new-conversation-id', }, } as Job; - const error = { - meta: { statusCode: 404 }, + const existing_es_doc = { + _source: { + parent_id: 'old-parent-id', + conversation_id: 'old-conversation-id', + }, }; - mock_elasticsearch_service.delete.mockRejectedValue(error); + + mock_tweets_repository.findOne.mockResolvedValue(mock_tweet); + mock_elasticsearch_service.get.mockResolvedValue(existing_es_doc as any); + mock_elasticsearch_service.index.mockResolvedValue({} as any); + + await processor.handleIndexTweet(job); + + expect(mock_elasticsearch_service.get).not.toHaveBeenCalled(); + expect(mock_elasticsearch_service.index).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + document: expect.objectContaining({ + parent_id: 'new-parent-id', + conversation_id: 'new-conversation-id', + }), + }); + }); + }); + + describe('handleDeleteTweet', () => { + it('should delete a tweet successfully', async () => { + const job = { + data: { + tweet_ids: ['tweet-123', 'tweet-321'], + }, + } as Job; + + mock_elasticsearch_service.bulk.mockResolvedValue({} as any); + + const logger_spy = jest.spyOn(Logger.prototype, 'log'); + + await processor.handleDeleteTweet(job); + + expect(mock_elasticsearch_service.bulk).toHaveBeenCalledWith({ + body: [ + { delete: { _index: ELASTICSEARCH_INDICES.TWEETS, _id: 'tweet-123' } }, + { delete: { _index: ELASTICSEARCH_INDICES.TWEETS, _id: 'tweet-321' } }, + ], + }); + expect(logger_spy).toHaveBeenCalledWith('Deleted 2 tweets from Elasticsearch'); + }); + + it('should skip if tweet not found in ES (404)', async () => { + const job = { + data: { + tweet_ids: ['tweet-123', 'tweet-321'], + }, + } as Job; + + mock_elasticsearch_service.bulk.mockResolvedValue({ + errors: true, + items: [ + { + delete: { + _id: 'tweet-123', + status: 404, + error: { type: 'document_missing_exception' }, + }, + }, + { + delete: { + _id: 'tweet-321', + status: 404, + error: { type: 'document_missing_exception' }, + }, + }, + ], + }); const logger_spy = jest.spyOn(Logger.prototype, 'warn'); await processor.handleDeleteTweet(job); - expect(logger_spy).toHaveBeenCalledWith( - 'Tweet 0c059899-f706-4c8f-97d7-ba2e9fc22d6d not found in ES, skipping delete' - ); + expect(logger_spy).toHaveBeenCalledWith('Tweet tweet-123 not found in ES, skipping'); + expect(logger_spy).toHaveBeenCalledWith('Tweet tweet-321 not found in ES, skipping'); }); it('should handle delete errors', async () => { const job = { data: { - tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + tweet_ids: ['tweet-123', 'tweet-321'], }, } as Job; - const error = new Error('Delete failed'); - mock_elasticsearch_service.delete.mockRejectedValue(error); + const error = new Error('Bulk delete failed'); + mock_elasticsearch_service.bulk.mockRejectedValue(error); const logger_spy = jest.spyOn(Logger.prototype, 'error'); - await expect(processor.handleDeleteTweet(job)).rejects.toThrow('Delete failed'); - expect(logger_spy).toHaveBeenCalledWith( - 'Failed to delete tweet 0c059899-f706-4c8f-97d7-ba2e9fc22d6d:', - error - ); + await expect(processor.handleDeleteTweet(job)).rejects.toThrow('Bulk delete failed'); + + expect(logger_spy).toHaveBeenCalledWith('Bulk delete failed:', error); }); }); diff --git a/src/background-jobs/elasticsearch/es-sync.processor.ts b/src/background-jobs/elasticsearch/es-sync.processor.ts index 8145f5c7..bbac3780 100644 --- a/src/background-jobs/elasticsearch/es-sync.processor.ts +++ b/src/background-jobs/elasticsearch/es-sync.processor.ts @@ -11,6 +11,8 @@ import { Repository } from 'typeorm'; import { User, UserFollows } from 'src/user/entities'; import { EsSyncUserDto } from './dtos/es-sync-user.dto'; import { EsSyncFollowDto } from './dtos/es-sync-follow.dto'; +import { TweetType } from 'src/shared/enums/tweet-types.enum'; +import { EsDeleteTweetsDto } from './dtos/es-delete-tweets.dto'; @Processor(QUEUE_NAMES.ELASTICSEARCH) export class EsSyncProcessor { @@ -18,9 +20,9 @@ export class EsSyncProcessor { constructor( @InjectRepository(Tweet) - private tweets_repository: Repository, + private readonly tweets_repository: Repository, @InjectRepository(User) - private user_repository: Repository, + private readonly user_repository: Repository, private readonly elasticsearch_service: ElasticsearchService, @InjectRepository(UserFollows) private readonly user_follows_repository: Repository @@ -41,10 +43,31 @@ export class EsSyncProcessor { return; } + let final_parent_id = parent_id; + let final_conversation_id = conversation_id; + + if ((!parent_id || !conversation_id) && tweet.type !== TweetType.TWEET) { + try { + const existing_doc = await this.elasticsearch_service.get<{ + parent_id?: string; + conversation_id?: string; + }>({ + index: ELASTICSEARCH_INDICES.TWEETS, + id: tweet_id, + }); + + final_parent_id = parent_id || existing_doc._source?.parent_id; + final_conversation_id = + conversation_id || existing_doc._source?.conversation_id; + } catch (error) { + this.logger.debug(`No existing ES document for tweet ${tweet_id}`); + } + } + await this.elasticsearch_service.index({ index: ELASTICSEARCH_INDICES.TWEETS, id: tweet_id, - document: this.transformTweetForES(tweet, parent_id, conversation_id), + document: this.transformTweetForES(tweet, final_parent_id, final_conversation_id), }); this.logger.log(`Indexed tweet ${tweet_id} to Elasticsearch`); @@ -55,23 +78,41 @@ export class EsSyncProcessor { } @Process(JOB_NAMES.ELASTICSEARCH.DELETE_TWEET) - async handleDeleteTweet(job: Job) { - const { tweet_id } = job.data; + async handleDeleteTweet(job: Job) { + const { tweet_ids } = job.data; + + if (!tweet_ids?.length) { + this.logger.warn('No tweet_ids provided, skipping ES delete'); + return; + } try { - await this.elasticsearch_service.delete({ - index: ELASTICSEARCH_INDICES.TWEETS, - id: tweet_id, - }); + const body = tweet_ids.flatMap((tweet_id: string) => [ + { delete: { _index: ELASTICSEARCH_INDICES.TWEETS, _id: tweet_id } }, + ]); - this.logger.log(`Deleted tweet ${tweet_id} from Elasticsearch`); - } catch (error) { - if (error.meta?.statusCode === 404) { - this.logger.warn(`Tweet ${tweet_id} not found in ES, skipping delete`); - } else { - this.logger.error(`Failed to delete tweet ${tweet_id}:`, error); - throw error; + const response = await this.elasticsearch_service.bulk({ body }); + + if (response.errors) { + response.items.forEach((item, i) => { + const result = item.delete; + if (result?.error) { + if (result.status === 404) { + this.logger.warn(`Tweet ${tweet_ids[i]} not found in ES, skipping`); + } else { + this.logger.error( + `Failed to delete tweet ${tweet_ids[i]}:`, + result.error + ); + } + } + }); } + + this.logger.log(`Deleted ${tweet_ids.length} tweets from Elasticsearch`); + } catch (error) { + this.logger.error('Bulk delete failed:', error); + throw error; } } @@ -257,6 +298,7 @@ export class EsSyncProcessor { following: tweet.user?.following || 0, images: tweet.images || [], videos: tweet.videos || [], + mentions: tweet.mentions || [], bio: tweet.user?.bio, avatar_url: tweet.user?.avatar_url, }; @@ -315,7 +357,7 @@ export class EsSyncProcessor { private extractHashtags(content: string): string[] { if (!content) return []; - const regex = /#[\w]+/g; + const regex = /#[\p{L}\p{N}_]+/gu; const matches = content.match(regex); if (!matches) return []; diff --git a/src/background-jobs/elasticsearch/es-update-user.service.ts b/src/background-jobs/elasticsearch/es-update-user.service.ts index aa65d2ed..be44d401 100644 --- a/src/background-jobs/elasticsearch/es-update-user.service.ts +++ b/src/background-jobs/elasticsearch/es-update-user.service.ts @@ -7,7 +7,9 @@ import { EsSyncUserDto } from './dtos/es-sync-user.dto'; @Injectable() export class EsUpdateUserJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.ELASTICSEARCH) private elasticsearch_queue: Queue) { + constructor( + @InjectQueue(QUEUE_NAMES.ELASTICSEARCH) private readonly elasticsearch_queue: Queue + ) { super( elasticsearch_queue, JOB_NAMES.ELASTICSEARCH.UPDATE_USER, diff --git a/src/background-jobs/explore/explore-jobs.controller.ts b/src/background-jobs/explore/explore-jobs.controller.ts index 00526050..972895af 100644 --- a/src/background-jobs/explore/explore-jobs.controller.ts +++ b/src/background-jobs/explore/explore-jobs.controller.ts @@ -37,4 +37,11 @@ export class ExploreController { queue_stats: stats, }; } + + @Post('clear') + @ApiOperation({ summary: 'Manually clear explore score recalculation' }) + @ApiResponse({ status: 200, description: 'Explore score job cleared successfully' }) + async clearExploreUpdate() { + return await this.explore_jobs_service.clearScoreRecalculation(); + } } diff --git a/src/background-jobs/explore/explore-jobs.cron.spec.ts b/src/background-jobs/explore/explore-jobs.cron.spec.ts index 9650dd63..6df1b705 100644 --- a/src/background-jobs/explore/explore-jobs.cron.spec.ts +++ b/src/background-jobs/explore/explore-jobs.cron.spec.ts @@ -62,5 +62,13 @@ describe('ExploreJobsCron', () => { expect(mock_explore_jobs_service.triggerScoreRecalculation).toHaveBeenCalled(); }); + + it('should handle exceptions thrown during scheduling', async () => { + mock_explore_jobs_service.triggerScoreRecalculation.mockRejectedValue( + new Error('Unexpected error') + ); + + await expect(cron.scheduleExploreScoreUpdate()).resolves.not.toThrow(); + }); }); }); diff --git a/src/background-jobs/explore/explore-jobs.cron.ts b/src/background-jobs/explore/explore-jobs.cron.ts index 54fb3a0f..48d1a88e 100644 --- a/src/background-jobs/explore/explore-jobs.cron.ts +++ b/src/background-jobs/explore/explore-jobs.cron.ts @@ -12,6 +12,7 @@ import { ExploreJobsService } from './explore-jobs.service'; export class ExploreJobsCron { private readonly logger = new Logger(ExploreJobsCron.name); + /* istanbul ignore next */ constructor(private readonly explore_jobs_service: ExploreJobsService) {} // Schedule explore score update job every hour diff --git a/src/background-jobs/explore/explore-jobs.processor.spec.ts b/src/background-jobs/explore/explore-jobs.processor.spec.ts index f3e1fc62..d311fffb 100644 --- a/src/background-jobs/explore/explore-jobs.processor.spec.ts +++ b/src/background-jobs/explore/explore-jobs.processor.spec.ts @@ -8,6 +8,7 @@ describe('ExploreJobsProcessor', () => { let explore_jobs_service: ExploreJobsService; const mock_explore_jobs_service = { + recalculateExistingTopTweets: jest.fn(), countTweetsForRecalculation: jest.fn(), fetchTweetsForRecalculation: jest.fn(), calculateScore: jest.fn(), @@ -60,6 +61,10 @@ describe('ExploreJobsProcessor', () => { }, ]; + mock_explore_jobs_service.recalculateExistingTopTweets.mockResolvedValue({ + categories_processed: 0, + tweets_recalculated: 0, + }); mock_explore_jobs_service.countTweetsForRecalculation.mockResolvedValue(1); mock_explore_jobs_service.fetchTweetsForRecalculation.mockResolvedValueOnce( mock_tweets @@ -84,6 +89,10 @@ describe('ExploreJobsProcessor', () => { progress: jest.fn().mockResolvedValue(undefined), } as unknown as Job; + mock_explore_jobs_service.recalculateExistingTopTweets.mockResolvedValue({ + categories_processed: 0, + tweets_recalculated: 0, + }); mock_explore_jobs_service.countTweetsForRecalculation.mockResolvedValue(0); const result = await processor.handleRecalculateExploreScores(mock_job); @@ -135,6 +144,10 @@ describe('ExploreJobsProcessor', () => { }, ]; + mock_explore_jobs_service.recalculateExistingTopTweets.mockResolvedValue({ + categories_processed: 0, + tweets_recalculated: 0, + }); mock_explore_jobs_service.countTweetsForRecalculation.mockResolvedValue(3); mock_explore_jobs_service.fetchTweetsForRecalculation .mockResolvedValueOnce(batch1) @@ -174,6 +187,10 @@ describe('ExploreJobsProcessor', () => { }, ]; + mock_explore_jobs_service.recalculateExistingTopTweets.mockResolvedValue({ + categories_processed: 0, + tweets_recalculated: 0, + }); mock_explore_jobs_service.countTweetsForRecalculation.mockResolvedValue(1); mock_explore_jobs_service.fetchTweetsForRecalculation .mockResolvedValueOnce(mock_tweets) @@ -209,6 +226,10 @@ describe('ExploreJobsProcessor', () => { }, ]; + mock_explore_jobs_service.recalculateExistingTopTweets.mockResolvedValue({ + categories_processed: 0, + tweets_recalculated: 0, + }); mock_explore_jobs_service.countTweetsForRecalculation.mockResolvedValue(1); mock_explore_jobs_service.fetchTweetsForRecalculation .mockResolvedValueOnce(mock_tweets) @@ -229,7 +250,7 @@ describe('ExploreJobsProcessor', () => { progress: jest.fn().mockResolvedValue(undefined), } as unknown as Job; - mock_explore_jobs_service.countTweetsForRecalculation.mockRejectedValue( + mock_explore_jobs_service.recalculateExistingTopTweets.mockRejectedValue( new Error('Database connection failed') ); @@ -245,6 +266,10 @@ describe('ExploreJobsProcessor', () => { progress: jest.fn().mockResolvedValue(undefined), } as unknown as Job; + mock_explore_jobs_service.recalculateExistingTopTweets.mockResolvedValue({ + categories_processed: 0, + tweets_recalculated: 0, + }); mock_explore_jobs_service.countTweetsForRecalculation.mockResolvedValue(0); const result = await processor.handleRecalculateExploreScores(mock_job); @@ -289,6 +314,10 @@ describe('ExploreJobsProcessor', () => { }, ]; + mock_explore_jobs_service.recalculateExistingTopTweets.mockResolvedValue({ + categories_processed: 0, + tweets_recalculated: 0, + }); mock_explore_jobs_service.countTweetsForRecalculation.mockResolvedValue(2); mock_explore_jobs_service.fetchTweetsForRecalculation .mockResolvedValueOnce(batch1) diff --git a/src/background-jobs/explore/explore-jobs.processor.ts b/src/background-jobs/explore/explore-jobs.processor.ts index bc524c35..11ddbf61 100644 --- a/src/background-jobs/explore/explore-jobs.processor.ts +++ b/src/background-jobs/explore/explore-jobs.processor.ts @@ -38,25 +38,44 @@ export class ExploreJobsProcessor { }; try { - //count total tweets to process + // STEP 1: Recalculate existing Redis top-N tweets + this.logger.log(`[Job ${job.id}] Step 1: Recalculating existing top tweets in Redis`); await job.progress(5); + + const step1_result = await this.exploreJobsService.recalculateExistingTopTweets(); + + this.logger.log( + `[Job ${job.id}] Step 1 Complete - Categories: ${step1_result.categories_processed}, ` + + `Tweets Recalculated: ${step1_result.tweets_recalculated}` + ); + + await job.progress(15); + + // STEP 2: Process recent engagement tweets + this.logger.log(`[Job ${job.id}] Step 2: Processing recent engagement tweets`); + const total_tweets = await this.exploreJobsService.countTweetsForRecalculation( since_hours, max_age_hours, force_all ); - this.logger.log(`[Job ${job.id}] Found ${total_tweets} tweets to process`); + this.logger.log(`[Job ${job.id}] Found ${total_tweets} recent tweets to process`); if (total_tweets === 0) { result.duration_ms = Date.now() - start_time; + result.tweets_updated = step1_result.tweets_recalculated; await job.progress(100); + this.logger.log( + `[Job ${job.id}] Completed - Only Step 1 executed (no recent engagement tweets)` + ); return result; } // process in batches let processed_count = 0; let page = 0; + const all_categories_updated = new Set(); while (processed_count < total_tweets) { const skip = page * batch_size; @@ -75,34 +94,28 @@ export class ExploreJobsProcessor { } try { - // calculate scores for batch - const tweet_scores = batch.map((tweet) => ({ + // Calculate scores and prepare for Redis update + const tweets_with_categories = batch.map((tweet) => ({ tweet_id: tweet.tweet_id, score: this.exploreJobsService.calculateScore(tweet), - })); - - // update Redis with new scores - const tweets_with_categories = batch.map((tweet, index) => ({ - tweet_id: tweet.tweet_id, - score: tweet_scores[index].score, categories: tweet.categories || [], })); - const categories_updated = - await this.exploreJobsService.updateRedisCategoryScores( - tweets_with_categories - ); - result.categories_updated = Math.max( - result.categories_updated, - categories_updated - ); + // Track unique categories from this batch + for (const tweet of tweets_with_categories) { + for (const cat of tweet.categories) { + all_categories_updated.add(cat.category_id); + } + } + + await this.exploreJobsService.updateRedisCategoryScores(tweets_with_categories); processed_count += batch.length; result.tweets_processed += batch.length; result.tweets_updated += batch.length; - // update job progress (debugging purpose) - const progress = Math.floor(10 + (processed_count / total_tweets) * 85); + // update job progress (Step 1: 0-15%, Step 2: 15-100%) + const progress = Math.floor(15 + (processed_count / total_tweets) * 85); await job.progress(progress); this.logger.debug( @@ -117,13 +130,22 @@ export class ExploreJobsProcessor { page++; } + // Add Step 1 tweets to total updated count + result.tweets_updated += step1_result.tweets_recalculated; + + // Set final unique categories count + result.categories_updated = all_categories_updated.size; + result.duration_ms = Date.now() - start_time; await job.progress(100); this.logger.log( - `[Job ${job.id}] Completed - Processed: ${result.tweets_processed}, ` + - `Categories Updated (Max): ${result.categories_updated}, ` + + `[Job ${job.id}] Completed - ` + + `Step 1: ${step1_result.tweets_recalculated} tweets, ` + + `Step 2: ${result.tweets_processed} tweets, ` + + `Total Updated: ${result.tweets_updated}, ` + + `Categories: ${result.categories_updated}, ` + `Duration: ${result.duration_ms}ms` ); diff --git a/src/background-jobs/explore/explore-jobs.service.spec.ts b/src/background-jobs/explore/explore-jobs.service.spec.ts index a32f37da..b35a01cb 100644 --- a/src/background-jobs/explore/explore-jobs.service.spec.ts +++ b/src/background-jobs/explore/explore-jobs.service.spec.ts @@ -19,6 +19,8 @@ describe('ExploreJobsService', () => { const mock_redis_service = { pipeline: jest.fn(), + keys: jest.fn(), + deleteByPrefix: jest.fn(), }; const mock_queue = { @@ -105,6 +107,12 @@ describe('ExploreJobsService', () => { expect(stats.failed).toBe(0); expect(stats.total_jobs).toBe(6); }); + + it('should throw error when queue operations fail', async () => { + mock_queue.getWaiting.mockRejectedValue(new Error('Queue connection failed')); + + await expect(service.getQueueStats()).rejects.toThrow('Queue connection failed'); + }); }); describe('calculateScore', () => { @@ -175,6 +183,24 @@ describe('ExploreJobsService', () => { expect(score).toBeGreaterThan(0); expect(typeof score).toBe('number'); }); + + it('should handle edge case where denominator could be zero', () => { + // This is a defensive check - mathematically unlikely but handled + const tweet = { + tweet_id: 'tweet-4', + num_likes: 100, + num_reposts: 50, + num_quotes: 20, + num_replies: 30, + created_at: new Date(), + }; + + const score = service.calculateScore(tweet); + + // Should return a valid number, not NaN or Infinity + expect(typeof score).toBe('number'); + expect(isFinite(score)).toBe(true); + }); }); describe('countTweetsForRecalculation', () => { @@ -351,4 +377,417 @@ describe('ExploreJobsService', () => { expect(categories_updated).toBe(1); }); }); + + describe('getAllActiveCategoryIds', () => { + it('should return active category IDs from Redis', async () => { + const mock_keys = ['explore:category:21', 'explore:category:20', 'invalid-key']; + (mock_redis_service as any).keys = jest.fn().mockResolvedValue(mock_keys); + + const result = await service.getAllActiveCategoryIds(); + + expect(result).toEqual(['21', '20']); + expect(mock_redis_service.keys).toHaveBeenCalledWith('explore:category:*'); + }); + + it('should handle redis errors', async () => { + (mock_redis_service as any).keys = jest + .fn() + .mockRejectedValue(new Error('Redis error')); + + const result = await service.getAllActiveCategoryIds(); + + expect(result).toEqual([]); + }); + }); + + describe('fetchTweetsByIds', () => { + it('should return tweets for given IDs', async () => { + const tweet_ids = ['tweet-1', 'tweet-2']; + const mock_tweets = [{ tweet_id: 'tweet-1' }, { tweet_id: 'tweet-2' }]; + + const mock_query_builder = { + leftJoinAndMapMany: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_tweets), + }; + + mock_tweet_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + + const result = await service.fetchTweetsByIds(tweet_ids); + + expect(result).toEqual(mock_tweets); + expect(mock_query_builder.andWhere).toHaveBeenCalledWith( + 'tweet.tweet_id IN (:...tweet_ids)', + { tweet_ids } + ); + }); + + it('should return empty array if no IDs provided', async () => { + const result = await service.fetchTweetsByIds([]); + expect(result).toEqual([]); + }); + + it('should handle database errors', async () => { + const mock_query_builder = { + leftJoinAndMapMany: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + getMany: jest.fn().mockRejectedValue(new Error('DB Error')), + }; + mock_tweet_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + + const result = await service.fetchTweetsByIds(['tweet-1']); + expect(result).toEqual([]); + }); + }); + + describe('recalculateExistingTopTweets', () => { + beforeEach(() => { + // Mock getAllActiveCategoryIds for this suite + (mock_redis_service as any).keys = jest.fn().mockResolvedValue(['explore:category:21']); + }); + + it('should recalculate scores for existing tweets', async () => { + // Mock Redis pipeline for fetching + const mock_fetch_pipeline = { + zrevrange: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([ + [null, ['tweet-1', '100', 'tweet-2', '50']], // Results for category 21 + ]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_fetch_pipeline); + + // Mock fetching tweet data + const mock_tweets = [ + { + tweet_id: 'tweet-1', + num_likes: 100, + num_reposts: 50, + num_quotes: 20, + num_replies: 30, + created_at: new Date(), + categories: [{ category_id: '21', percentage: 100 }], + }, + { + tweet_id: 'tweet-2', + num_likes: 50, + num_reposts: 10, + num_quotes: 5, + num_replies: 5, + created_at: new Date(), + categories: [{ category_id: '21', percentage: 100 }], + }, + ]; + + // Mock fetchTweetsByIds internal call + const mock_query_builder = { + leftJoinAndMapMany: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_tweets), + }; + mock_tweet_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + + // Mock Redis pipeline for updates + const mock_update_pipeline = { + zrem: jest.fn().mockReturnThis(), + zadd: jest.fn().mockReturnThis(), + zremrangebyrank: jest.fn().mockReturnThis(), + expire: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_update_pipeline); + + // Mock Redis pipeline for trim + const mock_trim_pipeline = { + zremrangebyrank: jest.fn().mockReturnThis(), + expire: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_trim_pipeline); + + const result = await service.recalculateExistingTopTweets(); + + expect(result.categories_processed).toBe(1); + expect(result.tweets_recalculated).toBe(2); + expect(mock_update_pipeline.zadd).toHaveBeenCalledTimes(2); + }); + + it('should return early if no active categories', async () => { + (mock_redis_service as any).keys = jest.fn().mockResolvedValue([]); + + const result = await service.recalculateExistingTopTweets(); + + expect(result.categories_processed).toBe(0); + expect(result.tweets_recalculated).toBe(0); + }); + + it('should handle missing pipeline results', async () => { + const mock_fetch_pipeline = { + zrevrange: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue(null), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_fetch_pipeline); + + const result = await service.recalculateExistingTopTweets(); + + expect(result.categories_processed).toBe(0); + }); + + it('should handle tweets not found in DB', async () => { + // Mock Redis pipeline for fetching + const mock_fetch_pipeline = { + zrevrange: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([[null, ['tweet-deleted', '100']]]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_fetch_pipeline); + + // Mock fetching tweet data returns empty + const mock_query_builder = { + leftJoinAndMapMany: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([]), + }; + mock_tweet_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + + const mock_update_pipeline = { + zrem: jest.fn().mockReturnThis(), + zadd: jest.fn().mockReturnThis(), + zremrangebyrank: jest.fn().mockReturnThis(), + expire: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_update_pipeline); + + // Mock Redis pipeline for trim + const mock_trim_pipeline = { + zremrangebyrank: jest.fn().mockReturnThis(), + expire: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_trim_pipeline); + + const result = await service.recalculateExistingTopTweets(); + + expect(result.tweets_recalculated).toBe(0); + expect(mock_update_pipeline.zrem).toHaveBeenCalledWith( + 'explore:category:21', + 'tweet-deleted' + ); + }); + + it('should handle pipeline errors for categories', async () => { + // Mock Redis pipeline with error for one category + const mock_fetch_pipeline = { + zrevrange: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([ + [new Error('Redis error'), null], // Error for category 21 + ]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_fetch_pipeline); + + const result = await service.recalculateExistingTopTweets(); + + expect(result.categories_processed).toBe(1); + expect(result.tweets_recalculated).toBe(0); + }); + + it('should handle all categories returning no tweets', async () => { + // Mock Redis pipeline with empty results + const mock_fetch_pipeline = { + zrevrange: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([ + [null, []], // Empty results for category 21 + ]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_fetch_pipeline); + + const result = await service.recalculateExistingTopTweets(); + + expect(result.categories_processed).toBe(1); + expect(result.tweets_recalculated).toBe(0); + }); + + it('should remove tweets with score below threshold', async () => { + // Mock Redis pipeline for fetching + const mock_fetch_pipeline = { + zrevrange: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([[null, ['tweet-low-score', '100']]]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_fetch_pipeline); + + // Mock fetching tweet data with very low engagement + const mock_tweets = [ + { + tweet_id: 'tweet-low-score', + num_likes: 0, + num_reposts: 0, + num_quotes: 0, + num_replies: 0, + created_at: new Date(Date.now() - 1000 * 60 * 60 * 24 * 7), // 7 days old + categories: [{ category_id: '21', percentage: 100 }], + }, + ]; + + const mock_query_builder = { + leftJoinAndMapMany: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_tweets), + }; + mock_tweet_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + + const mock_update_pipeline = { + zrem: jest.fn().mockReturnThis(), + zadd: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_update_pipeline); + + // Mock Redis pipeline for trim + const mock_trim_pipeline = { + zremrangebyrank: jest.fn().mockReturnThis(), + expire: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_trim_pipeline); + + const result = await service.recalculateExistingTopTweets(); + + expect(result.tweets_recalculated).toBe(0); + expect(mock_update_pipeline.zrem).toHaveBeenCalledWith( + 'explore:category:21', + 'tweet-low-score' + ); + }); + + it('should handle tweet without matching category (uses default percentage)', async () => { + // Mock Redis pipeline for fetching + const mock_fetch_pipeline = { + zrevrange: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([[null, ['tweet-no-cat', '100']]]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_fetch_pipeline); + + // Mock fetching tweet data with categories but not matching the Redis category + const mock_tweets = [ + { + tweet_id: 'tweet-no-cat', + num_likes: 100, + num_reposts: 50, + num_quotes: 20, + num_replies: 30, + created_at: new Date(), + categories: [{ category_id: '99', percentage: 50 }], // Different category + }, + ]; + + const mock_query_builder = { + leftJoinAndMapMany: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_tweets), + }; + mock_tweet_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + + const mock_update_pipeline = { + zrem: jest.fn().mockReturnThis(), + zadd: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_update_pipeline); + + // Mock Redis pipeline for trim + const mock_trim_pipeline = { + zremrangebyrank: jest.fn().mockReturnThis(), + expire: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_trim_pipeline); + + const result = await service.recalculateExistingTopTweets(); + + // Should use default percentage of 100 + expect(result.tweets_recalculated).toBe(1); + expect(mock_update_pipeline.zadd).toHaveBeenCalled(); + }); + }); + + describe('trimCategoryZSets', () => { + it('should trim and expire category sets', async () => { + (mock_redis_service as any).keys = jest.fn().mockResolvedValue(['explore:category:21']); + + // 1. Fetch Pipeline + const mock_fetch_pipeline = { + zrevrange: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([[null, ['tweet-1', '100']]]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_fetch_pipeline); + + // Mock fetching tweet data + const mock_tweets = [ + { + tweet_id: 'tweet-1', + num_likes: 100, + num_reposts: 50, + num_quotes: 20, + num_replies: 30, + created_at: new Date(), + categories: [{ category_id: '21', percentage: 100 }], + }, + ]; + + const mock_query_builder = { + leftJoinAndMapMany: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_tweets), + }; + mock_tweet_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + + // 2. Update Pipeline + const mock_update_pipeline = { + zrem: jest.fn().mockReturnThis(), + zadd: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_update_pipeline); + + // 3. Trim Pipeline + const mock_trim_pipeline = { + zremrangebyrank: jest.fn().mockReturnThis(), + expire: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + mock_redis_service.pipeline.mockReturnValueOnce(mock_trim_pipeline); + + await service.recalculateExistingTopTweets(); + + expect(mock_trim_pipeline.zremrangebyrank).toHaveBeenCalledWith( + 'explore:category:21', + 0, + -(50 + 1) // EXPLORE_CONFIG.MAX_CATEGORY_SIZE is likely 50 + ); + expect(mock_trim_pipeline.expire).toHaveBeenCalled(); + }); + }); + + describe('clearScoreRecalculation', () => { + it('should clear all explore keys', async () => { + (mock_redis_service as any).deleteByPrefix = jest.fn().mockResolvedValue(undefined); + + await service.clearScoreRecalculation(); + + expect(mock_redis_service.deleteByPrefix).toHaveBeenCalledWith('explore:category:'); + }); + }); }); diff --git a/src/background-jobs/explore/explore-jobs.service.ts b/src/background-jobs/explore/explore-jobs.service.ts index aae4d4d4..9f96f0d5 100644 --- a/src/background-jobs/explore/explore-jobs.service.ts +++ b/src/background-jobs/explore/explore-jobs.service.ts @@ -29,9 +29,9 @@ interface ITweetScoreData { export class ExploreJobsService { private readonly logger = new Logger(ExploreJobsService.name); constructor( - @InjectQueue(QUEUE_NAMES.EXPLORE) private explore_queue: Queue, - @InjectRepository(Tweet) private tweet_repository: Repository, - private redis_service: RedisService + @InjectQueue(QUEUE_NAMES.EXPLORE) private readonly explore_queue: Queue, + @InjectRepository(Tweet) private readonly tweet_repository: Repository, + private readonly redis_service: RedisService ) {} // ============================================ @@ -121,15 +121,8 @@ export class ExploreJobsService { return weighted_engagement / denominator; } - private getRecalculationQueryBuilder( - since_hours: number, - max_age_hours: number, - force_all: boolean - ) { - const max_age_date = new Date(); - max_age_date.setHours(max_age_date.getHours() - max_age_hours); - - const query = this.tweet_repository + private getBaseTweetQueryBuilder() { + return this.tweet_repository .createQueryBuilder('tweet') .leftJoinAndMapMany( 'tweet.categories', @@ -148,10 +141,20 @@ export class ExploreJobsService { 'tc.category_id', 'tc.percentage', ]) - .where('tweet.deleted_at IS NULL') - .andWhere('tweet.created_at > :max_age_date', { - max_age_date, - }); + .where('tweet.deleted_at IS NULL'); + } + + private getRecalculationQueryBuilder( + since_hours: number, + max_age_hours: number, + force_all: boolean + ) { + const max_age_date = new Date(); + max_age_date.setHours(max_age_date.getHours() - max_age_hours); + + const query = this.getBaseTweetQueryBuilder().andWhere('tweet.created_at > :max_age_date', { + max_age_date, + }); if (!force_all) { const since_date = new Date(); @@ -200,6 +203,172 @@ export class ExploreJobsService { return tweets as any as ITweetScoreData[]; } + // STEP 1: RECALCULATE EXISTING REDIS TOP-N TWEETS + + async getAllActiveCategoryIds(): Promise { + try { + const pattern = 'explore:category:*'; + const keys = await this.redis_service.keys(pattern); + + const category_ids = keys + .map((key) => { + const match = key.match(/explore:category:(.+)/); + return match ? match[1] : null; + }) + .filter((id) => id !== null); + + this.logger.log(`Found ${category_ids.length} active categories in Redis`); + return category_ids; + } catch (error) { + this.logger.error('Error fetching active category IDs:', error); + return []; + } + } + + async fetchTweetsByIds(tweet_ids: string[]): Promise { + if (tweet_ids.length === 0) return []; + + try { + const tweets = await this.getBaseTweetQueryBuilder() + .andWhere('tweet.tweet_id IN (:...tweet_ids)', { tweet_ids }) + .getMany(); + + return tweets as any as ITweetScoreData[]; + } catch (error) { + this.logger.error('Error fetching tweets by IDs:', error); + return []; + } + } + + //Recalculate scores for existing top-N tweets in Redis + + async recalculateExistingTopTweets(): Promise<{ + categories_processed: number; + tweets_recalculated: number; + }> { + const start_time = Date.now(); + + // Get all active category IDs + const category_ids = await this.getAllActiveCategoryIds(); + + if (category_ids.length === 0) { + this.logger.log('No active categories found in Redis'); + return { categories_processed: 0, tweets_recalculated: 0 }; + } + + // Fetch all category tweets in one Redis pipeline + const fetch_pipeline = this.redis_service.pipeline(); + for (const category_id of category_ids) { + const redis_key = `explore:category:${category_id}`; + fetch_pipeline.zrevrange( + redis_key, + 0, + EXPLORE_CONFIG.MAX_CATEGORY_SIZE - 1, + 'WITHSCORES' + ); + } + + const pipeline_results = await fetch_pipeline.exec(); + + // Validate pipeline results + if (!pipeline_results) { + this.logger.error('Redis pipeline returned null results'); + return { categories_processed: 0, tweets_recalculated: 0 }; + } + + // Parse results and collect all unique tweet IDs + const category_tweets_map = new Map>(); + const all_tweet_ids = new Set(); + + for (let i = 0; i < category_ids.length; i++) { + const category_id = category_ids[i]; + const [error, results] = pipeline_results[i]; + + if (error || !results || !Array.isArray(results) || results.length === 0) { + category_tweets_map.set(category_id, []); + continue; + } + + const top_tweets: Array<{ tweet_id: string; score: number }> = []; + for (let j = 0; j < results.length; j += 2) { + const tweet_id = results[j] as string; + const score = Number.parseFloat(results[j + 1] as string); + top_tweets.push({ tweet_id, score }); + all_tweet_ids.add(tweet_id); + } + + category_tweets_map.set(category_id, top_tweets); + } + + this.logger.log( + `Fetched ${all_tweet_ids.size} unique tweets across ${category_ids.length} categories` + ); + + if (all_tweet_ids.size === 0) { + this.logger.log('No tweets found in any category'); + return { categories_processed: category_ids.length, tweets_recalculated: 0 }; + } + + // Fetch all tweet data in one DB query + const tweet_data = await this.fetchTweetsByIds(Array.from(all_tweet_ids)); + const tweet_data_map = new Map(tweet_data.map((t) => [t.tweet_id, t])); + + // Recalculate scores and prepare Redis updates + const update_pipeline = this.redis_service.pipeline(); + let total_tweets_recalculated = 0; + + for (const category_id of category_ids) { + const top_tweets = category_tweets_map.get(category_id) || []; + const redis_key = `explore:category:${category_id}`; + + for (const top_tweet of top_tweets) { + const tweet = tweet_data_map.get(top_tweet.tweet_id); + + if (!tweet) { + // Tweet not found (deleted or doesn't exist), remove from Redis + update_pipeline.zrem(redis_key, top_tweet.tweet_id); + continue; + } + + // Recalculate score with updated engagement and time decay + const new_score = this.calculateScore(tweet); + + // Find the percentage for this category + const category = tweet.categories?.find((c) => c.category_id === category_id); + const percentage = category?.percentage || 100; + const weighted_score = new_score * (percentage / 100); + + // Update Redis with new score if above threshold + if (weighted_score >= EXPLORE_CONFIG.MIN_SCORE_THRESHOLD) { + update_pipeline.zadd(redis_key, weighted_score, tweet.tweet_id); + total_tweets_recalculated++; + } else { + // Score too low, remove from category + update_pipeline.zrem(redis_key, tweet.tweet_id); + } + } + } + + // Execute all Redis updates atomically + await update_pipeline.exec(); + + // Trim all categories to top 50 + await this.trimCategoryZSets(category_ids); + + const duration = Date.now() - start_time; + this.logger.log( + `Recalculated existing top tweets: ${category_ids.length} categories, ` + + `${total_tweets_recalculated} tweets in ${duration}ms` + ); + + return { + categories_processed: category_ids.length, + tweets_recalculated: total_tweets_recalculated, + }; + } + + // PROCESS RECENT ENGAGEMENT TWEETS + async updateRedisCategoryScores( tweets: { tweet_id: string; @@ -243,7 +412,6 @@ export class ExploreJobsService { const redis_key = `explore:category:${category_id}`; // Keep top MAX_CATEGORY_SIZE tweets - pipeline.zremrangebyrank(redis_key, 0, -(EXPLORE_CONFIG.MAX_CATEGORY_SIZE + 1)); // Category automatic expiration @@ -252,4 +420,9 @@ export class ExploreJobsService { await pipeline.exec(); } + + async clearScoreRecalculation() { + this.logger.log('Clearing explore score recalculation'); + await this.redis_service.deleteByPrefix('explore:category:'); + } } diff --git a/src/background-jobs/hashtag/hashtag.processor.ts b/src/background-jobs/hashtag/hashtag.processor.ts index 983e15e9..9c186012 100644 --- a/src/background-jobs/hashtag/hashtag.processor.ts +++ b/src/background-jobs/hashtag/hashtag.processor.ts @@ -13,7 +13,6 @@ export class HashtagProcessor { @Process(JOB_NAMES.HASHTAG.UPDATE_HASHTAG) async handleUpdateHashtags(job: bull.Job) { - const { hashtags, timestamp } = job.data; await this.trend_service.insertCandidateHashtags(job.data); await this.trend_service.insertCandidateCategories(job.data); diff --git a/src/background-jobs/notifications/clear/clear.processor.spec.ts b/src/background-jobs/notifications/clear/clear.processor.spec.ts index 9edc5ccf..69d260b0 100644 --- a/src/background-jobs/notifications/clear/clear.processor.spec.ts +++ b/src/background-jobs/notifications/clear/clear.processor.spec.ts @@ -180,8 +180,8 @@ describe('ClearProcessor', () => { ); }); - it('should log console message when clearing notifications', async () => { - const console_spy = jest.spyOn(console, 'log').mockImplementation(); + it('should log success message when clearing notifications', async () => { + const logger_spy = jest.spyOn(processor['logger'], 'log').mockImplementation(); const job_data: ClearBackGroundNotificationJobDTO = { user_id: 'user-123', @@ -197,14 +197,11 @@ describe('ClearProcessor', () => { mock_job as Job ); - expect(console_spy).toHaveBeenCalledWith( - 'Clearing notifications for user:', - 'user-123', - 'Tweet IDs:', - ['tweet-1', 'tweet-2'] + expect(logger_spy).toHaveBeenCalledWith( + 'Successfully cleared 2 notification(s) by tweet IDs for user user-123' ); - console_spy.mockRestore(); + logger_spy.mockRestore(); }); it('should log success message after clearing notifications', async () => { @@ -232,5 +229,74 @@ describe('ClearProcessor', () => { logger_log_spy.mockRestore(); }); + + it('should handle database errors gracefully', async () => { + const db_error = new Error('Database connection failed'); + mock_notifications_service.deleteNotificationsByTweetIds.mockRejectedValue(db_error); + + const job_data: ClearBackGroundNotificationJobDTO = { + user_id: 'user-123', + tweet_ids: ['tweet-1'], + }; + + const mock_job = { + id: 'job-error', + data: job_data, + } as Job; + + const logger_error_spy = jest.spyOn(Logger.prototype, 'error').mockImplementation(); + + await expect(processor.handleClearNotification(mock_job)).rejects.toThrow( + 'Database connection failed' + ); + + expect(logger_error_spy).toHaveBeenCalledWith( + 'Error processing clear notification job job-error:', + db_error + ); + + logger_error_spy.mockRestore(); + }); + + it('should handle empty tweet_ids array as invalid', async () => { + const job_data: ClearBackGroundNotificationJobDTO = { + user_id: 'user-123', + tweet_ids: [], + }; + + const mock_job = { + id: 'job-empty', + data: job_data, + } as Job; + + const logger_spy = jest.spyOn(Logger.prototype, 'warn').mockImplementation(); + + await processor.handleClearNotification(mock_job); + + expect(mock_notifications_service.deleteNotificationsByTweetIds).not.toHaveBeenCalled(); + expect(logger_spy).toHaveBeenCalled(); + + logger_spy.mockRestore(); + }); + + it('should handle large arrays of tweet IDs', async () => { + const large_tweet_ids = Array.from({ length: 100 }, (_, i) => `tweet-${i}`); + const job_data: ClearBackGroundNotificationJobDTO = { + user_id: 'user-123', + tweet_ids: large_tweet_ids, + }; + + const mock_job = { + id: 'job-large', + data: job_data, + } as Job; + + await processor.handleClearNotification(mock_job); + + expect(mock_notifications_service.deleteNotificationsByTweetIds).toHaveBeenCalledWith( + 'user-123', + large_tweet_ids + ); + }); }); }); diff --git a/src/background-jobs/notifications/clear/clear.processor.ts b/src/background-jobs/notifications/clear/clear.processor.ts index da1c804f..252f4688 100644 --- a/src/background-jobs/notifications/clear/clear.processor.ts +++ b/src/background-jobs/notifications/clear/clear.processor.ts @@ -24,7 +24,6 @@ export class ClearProcessor { } if (tweet_ids?.length) { - console.log('Clearing notifications for user:', user_id, 'Tweet IDs:', tweet_ids); await this.notifications_service.deleteNotificationsByTweetIds(user_id, tweet_ids); this.logger.log( `Successfully cleared ${tweet_ids.length} notification(s) by tweet IDs for user ${user_id}` @@ -32,7 +31,6 @@ export class ClearProcessor { } if (user_ids?.length) { - console.log('Clearing notifications for user:', user_id, 'User IDs:', user_ids); await this.notifications_service.cleanupNotificationsByUserIds(user_id, user_ids); this.logger.log( `Successfully cleared ${user_ids.length} notification(s) by user IDs for user ${user_id}` diff --git a/src/background-jobs/notifications/clear/clear.service.ts b/src/background-jobs/notifications/clear/clear.service.ts index 116aba0e..87978477 100644 --- a/src/background-jobs/notifications/clear/clear.service.ts +++ b/src/background-jobs/notifications/clear/clear.service.ts @@ -12,7 +12,7 @@ import { ClearBackGroundNotificationJobDTO } from './clear.dto'; @Injectable() export class ClearJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private clear_queue: Queue) { + constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private readonly clear_queue: Queue) { super(clear_queue, JOB_NAMES.NOTIFICATION.CLEAR, JOB_PRIORITIES.HIGH, JOB_DELAYS.IMMEDIATE); } diff --git a/src/background-jobs/notifications/follow/follow.dto.ts b/src/background-jobs/notifications/follow/follow.dto.ts index dee2042b..7694c956 100644 --- a/src/background-jobs/notifications/follow/follow.dto.ts +++ b/src/background-jobs/notifications/follow/follow.dto.ts @@ -4,6 +4,7 @@ export class FollowBackGroundNotificationJobDTO { action: 'add' | 'remove'; + follower_username?: string; follower_name?: string; follower_avatar_url?: string; } diff --git a/src/background-jobs/notifications/follow/follow.processor.spec.ts b/src/background-jobs/notifications/follow/follow.processor.spec.ts index b5cb9f7c..270c532f 100644 --- a/src/background-jobs/notifications/follow/follow.processor.spec.ts +++ b/src/background-jobs/notifications/follow/follow.processor.spec.ts @@ -28,7 +28,9 @@ describe('FollowProcessor', () => { { provide: NotificationsService, useValue: { - removeFollowNotification: jest.fn().mockResolvedValue(true), + removeFollowNotification: jest + .fn() + .mockResolvedValue('notification-id-123'), sendNotificationOnly: jest.fn(), saveNotificationAndSend: jest.fn(), }, @@ -125,10 +127,10 @@ describe('FollowProcessor', () => { expect(notifications_service.sendNotificationOnly).toHaveBeenCalledWith( NotificationType.FOLLOW, unfollow_data.followed_id, - expect.objectContaining({ - type: NotificationType.FOLLOW, + { + id: 'notification-id-123', action: 'remove', - }) + } ); }); diff --git a/src/background-jobs/notifications/follow/follow.processor.ts b/src/background-jobs/notifications/follow/follow.processor.ts index b5fe2680..bc29b34d 100644 --- a/src/background-jobs/notifications/follow/follow.processor.ts +++ b/src/background-jobs/notifications/follow/follow.processor.ts @@ -25,20 +25,18 @@ export class FollowProcessor { const { followed_id, follower_id, action } = job.data; if (action === 'remove') { - // Remove the notification from MongoDB - const was_deleted = await this.notifications_service.removeFollowNotification( + const notification_id = await this.notifications_service.removeFollowNotification( followed_id, follower_id ); - // Only send socket notification if deletion succeeded - if (was_deleted) { + if (notification_id) { this.notifications_service.sendNotificationOnly( NotificationType.FOLLOW, followed_id, { - type: NotificationType.FOLLOW, - ...job.data, + id: notification_id, + action: 'remove', } ); } diff --git a/src/background-jobs/notifications/follow/follow.service.ts b/src/background-jobs/notifications/follow/follow.service.ts index ecb5678b..8ec04a47 100644 --- a/src/background-jobs/notifications/follow/follow.service.ts +++ b/src/background-jobs/notifications/follow/follow.service.ts @@ -12,7 +12,7 @@ import { BackgroundJobsService } from 'src/background-jobs/background-jobs'; @Injectable() export class FollowJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private follow_queue: Queue) { + constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private readonly follow_queue: Queue) { super( follow_queue, JOB_NAMES.NOTIFICATION.FOLLOW, diff --git a/src/background-jobs/notifications/like/like.processor.spec.ts b/src/background-jobs/notifications/like/like.processor.spec.ts index 979e2d81..497578a0 100644 --- a/src/background-jobs/notifications/like/like.processor.spec.ts +++ b/src/background-jobs/notifications/like/like.processor.spec.ts @@ -168,7 +168,9 @@ describe('LikeProcessor', () => { describe('handleSendLikeNotification - remove action', () => { it('should remove like notification successfully', async () => { - notifications_service.removeLikeNotification = jest.fn().mockResolvedValue(true); + notifications_service.removeLikeNotification = jest + .fn() + .mockResolvedValue('notification-id-123'); notifications_service.sendNotificationOnly = jest.fn(); const remove_job_data: LikeBackGroundNotificationJobDTO = { @@ -195,14 +197,16 @@ describe('LikeProcessor', () => { expect(notifications_service.sendNotificationOnly).toHaveBeenCalledWith( NotificationType.LIKE, 'user-123', - expect.objectContaining({ - liked_by: 'user-456', - }) + { + id: 'notification-id-123', + ...remove_job_data, + action: 'remove', + } ); }); it('should not send notification if removal failed', async () => { - notifications_service.removeLikeNotification = jest.fn().mockResolvedValue(false); + notifications_service.removeLikeNotification = jest.fn().mockResolvedValue(null); notifications_service.sendNotificationOnly = jest.fn(); const remove_job_data: LikeBackGroundNotificationJobDTO = { diff --git a/src/background-jobs/notifications/like/like.processor.ts b/src/background-jobs/notifications/like/like.processor.ts index 83d89dd8..d85fe8d3 100644 --- a/src/background-jobs/notifications/like/like.processor.ts +++ b/src/background-jobs/notifications/like/like.processor.ts @@ -27,24 +27,23 @@ export class LikeProcessor { const { like_to, liked_by, tweet, action, tweet_id } = job.data; if (action === 'remove') { - // Remove the notification from MongoDB - let was_deleted = false; + let notification_id: string | null = null; if (tweet_id) { - was_deleted = await this.notifications_service.removeLikeNotification( + notification_id = await this.notifications_service.removeLikeNotification( like_to, tweet_id, liked_by ); } - if (was_deleted) { + if (notification_id) { this.notifications_service.sendNotificationOnly( NotificationType.LIKE, like_to, { - type: NotificationType.LIKE, + id: notification_id, ...job.data, - liked_by, + action: 'remove', } ); } diff --git a/src/background-jobs/notifications/like/like.service.spec.ts b/src/background-jobs/notifications/like/like.service.spec.ts index 587cddb3..92e55bbd 100644 --- a/src/background-jobs/notifications/like/like.service.spec.ts +++ b/src/background-jobs/notifications/like/like.service.spec.ts @@ -139,5 +139,41 @@ describe('LikeJobService', () => { const result = await service.queueLikeNotification(mock_like_dto); expect(result).toEqual({ success: false, error: 'Queue error' }); }); + + it('should handle different tweet object structures', async () => { + const dto_with_complex_tweet: LikeBackGroundNotificationJobDTO = { + like_to: 'user-123', + liked_by: 'user-456', + tweet: { + tweet_id: 'tweet-789', + content: 'Complex tweet', + user: { id: 'user-123', username: 'testuser' }, + } as any, + }; + + const result = await service.queueLikeNotification(dto_with_complex_tweet); + + expect(mock_queue.add).toHaveBeenCalledWith( + JOB_NAMES.NOTIFICATION.LIKE, + dto_with_complex_tweet, + expect.any(Object) + ); + expect(result.success).toBe(true); + }); + + it('should queue job with action parameter', async () => { + const dto_with_action = { + ...mock_like_dto, + action: 'add' as const, + }; + + await service.queueLikeNotification(dto_with_action); + + expect(mock_queue.add).toHaveBeenCalledWith( + JOB_NAMES.NOTIFICATION.LIKE, + dto_with_action, + expect.any(Object) + ); + }); }); }); diff --git a/src/background-jobs/notifications/like/like.service.ts b/src/background-jobs/notifications/like/like.service.ts index ae54c5a5..3a8bbb18 100644 --- a/src/background-jobs/notifications/like/like.service.ts +++ b/src/background-jobs/notifications/like/like.service.ts @@ -12,7 +12,7 @@ import type { Queue } from 'bull'; @Injectable() export class LikeJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private reply_queue: Queue) { + constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private readonly reply_queue: Queue) { super(reply_queue, JOB_NAMES.NOTIFICATION.LIKE, JOB_PRIORITIES.HIGH, JOB_DELAYS.IMMEDIATE); } diff --git a/src/background-jobs/notifications/mention/mention.dto.ts b/src/background-jobs/notifications/mention/mention.dto.ts index e7e2bfc3..6c61debf 100644 --- a/src/background-jobs/notifications/mention/mention.dto.ts +++ b/src/background-jobs/notifications/mention/mention.dto.ts @@ -8,7 +8,7 @@ export class MentionBackGroundNotificationJobDTO { parent_tweet?: TweetResponseDTO; mentioned_by: string; - mentioned_usernames?: string[]; + mentioned_user_ids?: string[]; tweet_type: 'tweet' | 'quote' | 'reply'; diff --git a/src/background-jobs/notifications/mention/mention.processor.spec.ts b/src/background-jobs/notifications/mention/mention.processor.spec.ts index ceff0c0c..1276868f 100644 --- a/src/background-jobs/notifications/mention/mention.processor.spec.ts +++ b/src/background-jobs/notifications/mention/mention.processor.spec.ts @@ -86,11 +86,6 @@ describe('MentionProcessor', () => { user_id: 'user-author', }; - const mock_users = [ - { id: 'user-1', username: 'user1' }, - { id: 'user-2', username: 'user2' }, - ]; - const mock_mentioner = { id: 'user-author', username: 'author', @@ -99,11 +94,10 @@ describe('MentionProcessor', () => { avatar_url: 'avatar.jpg', }; - user_repository.find.mockResolvedValue(mock_users as User[]); user_repository.findOne.mockResolvedValue(mock_mentioner as User); const job = mock_job({ - mentioned_usernames: ['user1', 'user2'], + mentioned_user_ids: ['user-1', 'user-2'], mentioned_by: 'user-author', tweet_id: 'tweet-123', tweet: mock_tweet as unknown as Tweet, @@ -113,10 +107,6 @@ describe('MentionProcessor', () => { await processor.handleSendMentionNotification(job); - expect(user_repository.find).toHaveBeenCalledWith({ - where: [{ username: 'user1' }, { username: 'user2' }], - select: ['id'], - }); expect(user_repository.findOne).toHaveBeenCalledTimes(2); expect(notifications_service.saveNotificationAndSend).toHaveBeenCalledTimes(2); }); @@ -128,12 +118,8 @@ describe('MentionProcessor', () => { user_id: 'user-author', }; - const mock_users = [{ id: 'user-author', username: 'author' }]; - - user_repository.find.mockResolvedValue(mock_users as User[]); - const job = mock_job({ - mentioned_usernames: ['author'], + mentioned_user_ids: ['user-author'], mentioned_by: 'user-author', tweet_id: 'tweet-123', tweet: mock_tweet as unknown as Tweet, @@ -158,29 +144,26 @@ describe('MentionProcessor', () => { text: 'Original tweet', }; - const mock_users = [{ id: 'user-1', username: 'user1' }]; - const mock_mentioner = { id: 'user-author', - username: 'author', - email: 'author@test.com', - name: 'Author', - avatar_url: 'avatar.jpg', + username: 'author-user', + email: 'author@example.com', + name: 'Author User', + avatar_url: 'http://example.com/avatar.jpg', }; - user_repository.find.mockResolvedValue(mock_users as User[]); - user_repository.findOne.mockResolvedValue(mock_mentioner as User); - const job = mock_job({ - mentioned_usernames: ['user1'], + mentioned_user_ids: ['user-1'], mentioned_by: 'user-author', - tweet_id: 'quote-123', tweet: mock_tweet as unknown as Tweet, parent_tweet: mock_parent_tweet as any, tweet_type: 'quote', action: 'add', }); + user_repository.findOne.mockResolvedValue(mock_mentioner as any); + notifications_service.saveNotificationAndSend.mockResolvedValue(undefined); + await processor.handleSendMentionNotification(job); expect(notifications_service.saveNotificationAndSend).toHaveBeenCalledWith( @@ -192,10 +175,7 @@ describe('MentionProcessor', () => { mentioned_by: 'user-author', tweet_type: 'quote', }), - expect.objectContaining({ - type: NotificationType.MENTION, - tweet_type: 'quote', - }) + expect.anything() ); }); @@ -211,8 +191,6 @@ describe('MentionProcessor', () => { text: 'Original tweet', }; - const mock_users = [{ id: 'user-1', username: 'user1' }]; - const mock_mentioner = { id: 'user-author', username: 'author', @@ -221,11 +199,10 @@ describe('MentionProcessor', () => { avatar_url: 'avatar.jpg', }; - user_repository.find.mockResolvedValue(mock_users as User[]); user_repository.findOne.mockResolvedValue(mock_mentioner as User); const job = mock_job({ - mentioned_usernames: ['user1'], + mentioned_user_ids: ['user-1'], mentioned_by: 'user-author', tweet_id: 'reply-123', tweet: mock_tweet as unknown as Tweet, @@ -253,7 +230,7 @@ describe('MentionProcessor', () => { const logger_spy = jest.spyOn(processor['logger'], 'warn'); const job = mock_job({ - mentioned_usernames: ['user1'], + mentioned_user_ids: ['user-1'], mentioned_by: 'user-author', tweet_id: 'tweet-123', tweet_type: 'tweet', @@ -275,15 +252,12 @@ describe('MentionProcessor', () => { user_id: 'user-author', }; - const mock_users = [{ id: 'user-1', username: 'user1' }]; - const logger_spy = jest.spyOn(processor['logger'], 'warn'); - user_repository.find.mockResolvedValue(mock_users as User[]); user_repository.findOne.mockResolvedValue(null); const job = mock_job({ - mentioned_usernames: ['user1'], + mentioned_user_ids: ['user-1'], mentioned_by: 'user-author', tweet_id: 'tweet-123', tweet: mock_tweet as unknown as Tweet, @@ -300,16 +274,12 @@ describe('MentionProcessor', () => { describe('handleSendMentionNotification - remove action', () => { it('should remove mention notifications for multiple users', async () => { - const mock_users = [ - { id: 'user-1', username: 'user1' }, - { id: 'user-2', username: 'user2' }, - ]; - - user_repository.find.mockResolvedValue(mock_users as User[]); - notifications_service.removeMentionNotification.mockResolvedValue(true); + notifications_service.removeMentionNotification.mockResolvedValue( + 'notification-id-123' + ); const job = mock_job({ - mentioned_usernames: ['user1', 'user2'], + mentioned_user_ids: ['user-1', 'user-2'], mentioned_by: 'user-author', tweet_id: 'tweet-123', action: 'remove', @@ -324,16 +294,22 @@ describe('MentionProcessor', () => { 'user-author' ); expect(notifications_service.sendNotificationOnly).toHaveBeenCalledTimes(2); + expect(notifications_service.sendNotificationOnly).toHaveBeenCalledWith( + NotificationType.MENTION, + 'user-1', + { + id: 'notification-id-123', + ...job.data, + action: 'remove', + } + ); }); it('should skip sending notification if removal failed', async () => { - const mock_users = [{ id: 'user-1', username: 'user1' }]; - - user_repository.find.mockResolvedValue(mock_users as User[]); - notifications_service.removeMentionNotification.mockResolvedValue(false); + notifications_service.removeMentionNotification.mockResolvedValue(null); const job = mock_job({ - mentioned_usernames: ['user1'], + mentioned_user_ids: ['user-1'], mentioned_by: 'user-author', tweet_id: 'tweet-123', action: 'remove', @@ -346,16 +322,12 @@ describe('MentionProcessor', () => { }); it('should not remove mention for the author themselves', async () => { - const mock_users = [ - { id: 'user-author', username: 'author' }, - { id: 'user-1', username: 'user1' }, - ]; - - user_repository.find.mockResolvedValue(mock_users as User[]); - notifications_service.removeMentionNotification.mockResolvedValue(true); + notifications_service.removeMentionNotification.mockResolvedValue( + 'notification-id-123' + ); const job = mock_job({ - mentioned_usernames: ['author', 'user1'], + mentioned_user_ids: ['user-author', 'user-1'], mentioned_by: 'user-author', tweet_id: 'tweet-123', action: 'remove', @@ -373,7 +345,7 @@ describe('MentionProcessor', () => { it('should handle empty mentioned_usernames array', async () => { const job = mock_job({ - mentioned_usernames: [], + mentioned_user_ids: [], mentioned_by: 'user-author', tweet_id: 'tweet-123', action: 'remove', @@ -387,14 +359,13 @@ describe('MentionProcessor', () => { it('should handle missing tweet_id', async () => { const job = mock_job({ - mentioned_usernames: ['user1'], + mentioned_user_ids: ['user-1'], mentioned_by: 'user-author', action: 'remove', }); await processor.handleSendMentionNotification(job); - expect(user_repository.find).not.toHaveBeenCalled(); expect(notifications_service.removeMentionNotification).not.toHaveBeenCalled(); }); }); @@ -409,10 +380,11 @@ describe('MentionProcessor', () => { const logger_spy = jest.spyOn(processor['logger'], 'error'); const error = new Error('Database connection failed'); - user_repository.find.mockRejectedValue(error); + + user_repository.findOne.mockRejectedValue(error); const job = mock_job({ - mentioned_usernames: ['user1'], + mentioned_user_ids: ['user-1'], mentioned_by: 'user-author', tweet_id: 'tweet-123', tweet: mock_tweet as unknown as Tweet, @@ -434,8 +406,6 @@ describe('MentionProcessor', () => { user_id: 'user-author', }; - const mock_users = [{ id: 'user-1', username: 'user1' }]; - const mock_mentioner = { id: 'user-author', username: 'author', @@ -444,14 +414,13 @@ describe('MentionProcessor', () => { avatar_url: 'avatar.jpg', }; - user_repository.find.mockResolvedValue(mock_users as User[]); user_repository.findOne.mockResolvedValue(mock_mentioner as User); const error = new Error('Save failed'); notifications_service.saveNotificationAndSend.mockRejectedValue(error); const job = mock_job({ - mentioned_usernames: ['user1'], + mentioned_user_ids: ['user-1'], mentioned_by: 'user-author', tweet_id: 'tweet-123', tweet: mock_tweet as unknown as Tweet, diff --git a/src/background-jobs/notifications/mention/mention.processor.ts b/src/background-jobs/notifications/mention/mention.processor.ts index d7bdbcd3..c6fc2146 100644 --- a/src/background-jobs/notifications/mention/mention.processor.ts +++ b/src/background-jobs/notifications/mention/mention.processor.ts @@ -31,7 +31,7 @@ export class MentionProcessor { async handleSendMentionNotification(job: Job) { try { const { - mentioned_usernames, + mentioned_user_ids, mentioned_by, tweet_id, tweet, @@ -41,34 +41,26 @@ export class MentionProcessor { } = job.data; if (action === 'remove') { - // For remove action, we need usernames to find user IDs - if (!mentioned_usernames || mentioned_usernames.length === 0 || !tweet_id) return; + if (!mentioned_user_ids || mentioned_user_ids.length === 0 || !tweet_id) return; - // Fetch user IDs from usernames - const users = await this.user_repository.find({ - where: mentioned_usernames.map((username) => ({ username })), - select: ['id'], - }); + for (const user_id of mentioned_user_ids) { + if (user_id === mentioned_by) continue; - // Queue removal for each mentioned user - for (const user of users) { - if (user.id === mentioned_by) continue; - - const was_deleted = await this.notifications_service.removeMentionNotification( - user.id, - tweet_id, - mentioned_by - ); + const notification_id = + await this.notifications_service.removeMentionNotification( + user_id, + tweet_id, + mentioned_by + ); - if (was_deleted) { + if (notification_id) { this.notifications_service.sendNotificationOnly( NotificationType.MENTION, - user.id, + user_id, { - type: NotificationType.MENTION, - tweet_id, - mentioned_by, - action, + id: notification_id, + ...job.data, + action: 'remove', } ); } @@ -77,22 +69,12 @@ export class MentionProcessor { if (!tweet) { this.logger.warn(`Tweet data not provided in job ${job.id}.`); return; - } - - // For add action with usernames (batch processing) - else if (mentioned_usernames && mentioned_usernames.length > 0) { - // Fetch user IDs from usernames - const users = await this.user_repository.find({ - where: mentioned_usernames.map((username) => ({ username })), - select: ['id'], - }); - - // Process mention for each user - for (const user of users) { - if (user.id === mentioned_by) continue; + } else if (mentioned_user_ids && mentioned_user_ids.length > 0) { + for (const user_id of mentioned_user_ids) { + if (user_id === mentioned_by) continue; await this.processMentionForUser( - user.id, + user_id, mentioned_by, tweet, parent_tweet, @@ -126,7 +108,6 @@ export class MentionProcessor { mentioner.id = mentioned_by; - // Build payload const payload: any = { type: NotificationType.MENTION, mentioned_by: mentioner, @@ -134,7 +115,6 @@ export class MentionProcessor { }; if (tweet_type === 'quote' && parent_tweet) { - // Use parent_tweet from DTO (already formatted) const quote = plainToInstance( TweetQuoteResponseDTO, { @@ -145,7 +125,6 @@ export class MentionProcessor { ); payload.tweet = quote; } else { - // For normal tweets or replies payload.tweet = plainToInstance(TweetResponseDTO, tweet, { excludeExtraneousValues: true, }); diff --git a/src/background-jobs/notifications/mention/mention.service.spec.ts b/src/background-jobs/notifications/mention/mention.service.spec.ts index b711b502..47fd9a4c 100644 --- a/src/background-jobs/notifications/mention/mention.service.spec.ts +++ b/src/background-jobs/notifications/mention/mention.service.spec.ts @@ -168,5 +168,49 @@ describe('MentionJobService', () => { expect(result).toEqual({ success: true, job_id: 'job-empty' }); }); + + it('should handle mention in quote tweet', async () => { + const dto: MentionBackGroundNotificationJobDTO = { + mentioned_usernames: ['user9'], + mentioned_by: 'author-quote', + tweet_id: 'tweet-quote', + tweet: { tweet_id: 'tweet-quote' } as any, + parent_tweet: { tweet_id: 'quoted-tweet' } as any, + tweet_type: 'quote', + action: 'add', + }; + + const mock_job = { id: 'job-quote' }; + queue.add.mockResolvedValue(mock_job as any); + + const result = await service.queueMentionNotification(dto); + + expect(result).toEqual({ success: true, job_id: 'job-quote' }); + }); + + it('should handle mention with default priority and delay', async () => { + const dto: MentionBackGroundNotificationJobDTO = { + mentioned_usernames: ['user10'], + mentioned_by: 'author-default', + tweet_id: 'tweet-default', + tweet_type: 'tweet', + action: 'add', + }; + + const mock_job = { id: 'job-default' }; + queue.add.mockResolvedValue(mock_job as any); + + const result = await service.queueMentionNotification(dto); + + expect(queue.add).toHaveBeenCalledWith( + expect.any(String), + dto, + expect.objectContaining({ + attempts: 3, + backoff: expect.any(Object), + }) + ); + expect(result.success).toBe(true); + }); }); }); diff --git a/src/background-jobs/notifications/mention/mention.service.ts b/src/background-jobs/notifications/mention/mention.service.ts index 8636ebbb..01ee5fc1 100644 --- a/src/background-jobs/notifications/mention/mention.service.ts +++ b/src/background-jobs/notifications/mention/mention.service.ts @@ -12,7 +12,7 @@ import type { Queue } from 'bull'; @Injectable() export class MentionJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private mention_queue: Queue) { + constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private readonly mention_queue: Queue) { super( mention_queue, JOB_NAMES.NOTIFICATION.MENTION, diff --git a/src/background-jobs/notifications/message/message.service.ts b/src/background-jobs/notifications/message/message.service.ts index 624637cd..74af9088 100644 --- a/src/background-jobs/notifications/message/message.service.ts +++ b/src/background-jobs/notifications/message/message.service.ts @@ -12,7 +12,7 @@ import type { Queue } from 'bull'; @Injectable() export class MessageJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private message_queue: Queue) { + constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private readonly message_queue: Queue) { super( message_queue, JOB_NAMES.NOTIFICATION.MESSAGE, diff --git a/src/background-jobs/notifications/quote/quote.processor.spec.ts b/src/background-jobs/notifications/quote/quote.processor.spec.ts index 740cf4ba..f6b83c8b 100644 --- a/src/background-jobs/notifications/quote/quote.processor.spec.ts +++ b/src/background-jobs/notifications/quote/quote.processor.spec.ts @@ -232,7 +232,7 @@ describe('QuoteProcessor', () => { describe('handleSendQuoteNotification - remove action', () => { it('should remove quote notification successfully', async () => { - notifications_service.removeQuoteNotification.mockResolvedValue(true); + notifications_service.removeQuoteNotification.mockResolvedValue('notification-id-123'); const job = mock_job({ quote_to: 'quote-to-id', @@ -251,14 +251,16 @@ describe('QuoteProcessor', () => { expect(notifications_service.sendNotificationOnly).toHaveBeenCalledWith( NotificationType.QUOTE, 'quote-to-id', - expect.objectContaining({ - quoted_by: 'quoter-id', - }) + { + id: 'notification-id-123', + ...job.data, + action: 'remove', + } ); }); it('should not send notification if removal failed', async () => { - notifications_service.removeQuoteNotification.mockResolvedValue(false); + notifications_service.removeQuoteNotification.mockResolvedValue(null); const job = mock_job({ quote_to: 'quote-to-id', diff --git a/src/background-jobs/notifications/quote/quote.processor.ts b/src/background-jobs/notifications/quote/quote.processor.ts index 4fc11cd9..07e58aeb 100644 --- a/src/background-jobs/notifications/quote/quote.processor.ts +++ b/src/background-jobs/notifications/quote/quote.processor.ts @@ -34,23 +34,23 @@ export class QuoteProcessor { job.data; if (action === 'remove') { - let was_deleted = false; + let notification_id: string | null = null; if (quote_to && quote_tweet_id) { - was_deleted = await this.notifications_service.removeQuoteNotification( + notification_id = await this.notifications_service.removeQuoteNotification( quote_to, quote_tweet_id, quoted_by ); } - if (was_deleted) { + if (notification_id) { this.notifications_service.sendNotificationOnly( NotificationType.QUOTE, quote_to, { - type: NotificationType.QUOTE, + id: notification_id, ...job.data, - quoted_by, + action: 'remove', } ); } diff --git a/src/background-jobs/notifications/quote/quote.service.spec.ts b/src/background-jobs/notifications/quote/quote.service.spec.ts index 72e3aa9c..e92a5d13 100644 --- a/src/background-jobs/notifications/quote/quote.service.spec.ts +++ b/src/background-jobs/notifications/quote/quote.service.spec.ts @@ -163,5 +163,57 @@ describe('QuoteJobService', () => { expect(result).toEqual({ success: true, job_id: 'job-remove' }); }); + + it('should handle quote with complex tweet structures', async () => { + const dto: QuoteBackGroundNotificationJobDTO = { + quote_to: 'author-complex', + quoted_by: 'quoter-complex', + quote_tweet_id: 'quote-complex', + quote_tweet: { + tweet_id: 'quote-complex', + content: 'Complex quote with media', + media: [{ url: 'image.jpg' }], + user: { id: 'quoter-complex', username: 'quoter' }, + } as any, + parent_tweet: { + tweet_id: 'parent-complex', + content: 'Original complex tweet', + } as any, + action: 'add', + }; + + const mock_job = { id: 'job-complex' }; + queue.add.mockResolvedValue(mock_job as any); + + const result = await service.queueQuoteNotification(dto); + + expect(result).toEqual({ success: true, job_id: 'job-complex' }); + }); + + it('should apply default job options correctly', async () => { + const dto: QuoteBackGroundNotificationJobDTO = { + quote_to: 'author-defaults', + quoted_by: 'quoter-defaults', + quote_tweet_id: 'quote-defaults', + action: 'add', + }; + + const mock_job = { id: 'job-defaults' }; + queue.add.mockResolvedValue(mock_job as any); + + const result = await service.queueQuoteNotification(dto); + + expect(queue.add).toHaveBeenCalledWith( + expect.any(String), + dto, + expect.objectContaining({ + attempts: 3, + backoff: expect.any(Object), + removeOnComplete: 10, + removeOnFail: 5, + }) + ); + expect(result.success).toBe(true); + }); }); }); diff --git a/src/background-jobs/notifications/quote/quote.service.ts b/src/background-jobs/notifications/quote/quote.service.ts index 903330cd..77663bc2 100644 --- a/src/background-jobs/notifications/quote/quote.service.ts +++ b/src/background-jobs/notifications/quote/quote.service.ts @@ -12,7 +12,7 @@ import { QuoteBackGroundNotificationJobDTO } from './quote.dto'; @Injectable() export class QuoteJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private quote_queue: Queue) { + constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private readonly quote_queue: Queue) { super(quote_queue, JOB_NAMES.NOTIFICATION.QUOTE, JOB_PRIORITIES.HIGH, JOB_DELAYS.IMMEDIATE); } diff --git a/src/background-jobs/notifications/reply/reply.dto.ts b/src/background-jobs/notifications/reply/reply.dto.ts index 4c350972..8fda59bf 100644 --- a/src/background-jobs/notifications/reply/reply.dto.ts +++ b/src/background-jobs/notifications/reply/reply.dto.ts @@ -5,7 +5,7 @@ export class ReplyBackGroundNotificationJobDTO { reply_tweet?: Tweet; reply_tweet_id?: string; - original_tweet_id?: string; + original_tweet?: Tweet; replied_by: string; reply_to: string; diff --git a/src/background-jobs/notifications/reply/reply.processor.spec.ts b/src/background-jobs/notifications/reply/reply.processor.spec.ts index 999f3eaa..6b15fa73 100644 --- a/src/background-jobs/notifications/reply/reply.processor.spec.ts +++ b/src/background-jobs/notifications/reply/reply.processor.spec.ts @@ -31,12 +31,17 @@ describe('ReplyProcessor', () => { content: 'This is a reply', } as any; + const mock_original_tweet = { + tweet_id: 'original-tweet-123', + content: 'This is the original tweet', + } as any; + const mock_job_data: ReplyBackGroundNotificationJobDTO = { reply_to: 'user-123', replied_by: 'user-456', reply_tweet: mock_tweet, reply_tweet_id: 'reply-tweet-123', - original_tweet_id: 'original-tweet-123', + original_tweet: mock_original_tweet, conversation_id: 'conversation-123', action: 'add', }; @@ -242,7 +247,9 @@ describe('ReplyProcessor', () => { describe('handleSendReplyNotification - remove action', () => { it('should remove reply notification successfully', async () => { - notifications_service.removeReplyNotification = jest.fn().mockResolvedValue(true); + notifications_service.removeReplyNotification = jest + .fn() + .mockResolvedValue('notification-id-123'); notifications_service.sendNotificationOnly = jest.fn(); const remove_job_data: ReplyBackGroundNotificationJobDTO = { @@ -271,14 +278,16 @@ describe('ReplyProcessor', () => { expect(notifications_service.sendNotificationOnly).toHaveBeenCalledWith( NotificationType.REPLY, 'user-123', - expect.objectContaining({ - replied_by: 'user-456', - }) + { + id: 'notification-id-123', + ...remove_job_data, + action: 'remove', + } ); }); it('should not send notification if removal failed', async () => { - notifications_service.removeReplyNotification = jest.fn().mockResolvedValue(false); + notifications_service.removeReplyNotification = jest.fn().mockResolvedValue(null); notifications_service.sendNotificationOnly = jest.fn(); const remove_job_data: ReplyBackGroundNotificationJobDTO = { diff --git a/src/background-jobs/notifications/reply/reply.processor.ts b/src/background-jobs/notifications/reply/reply.processor.ts index 3c3be42d..5d77afff 100644 --- a/src/background-jobs/notifications/reply/reply.processor.ts +++ b/src/background-jobs/notifications/reply/reply.processor.ts @@ -32,27 +32,27 @@ export class ReplyProcessor { replied_by, reply_tweet_id, reply_tweet, - original_tweet_id, + original_tweet, conversation_id, action, } = job.data; let payload: any; if (action === 'remove') { - let was_deleted = false; + let notification_id: string | null = null; if (reply_to && reply_tweet_id) { - was_deleted = await this.notifications_service.removeReplyNotification( + notification_id = await this.notifications_service.removeReplyNotification( reply_to, reply_tweet_id, replied_by ); } - if (was_deleted) { + if (notification_id) { payload = { - type: NotificationType.REPLY, + id: notification_id, ...job.data, - replied_by, + action: 'remove', }; this.notifications_service.sendNotificationOnly( @@ -79,12 +79,17 @@ export class ReplyProcessor { return; } + if (!original_tweet) { + this.logger.warn(`Original tweet not found.`); + return; + } + const notification_entity: ReplyNotificationEntity = Object.assign( new ReplyNotificationEntity(), { type: NotificationType.REPLY, reply_tweet_id: reply_tweet.tweet_id, - original_tweet_id, + original_tweet_id: original_tweet.tweet_id, replied_by, conversation_id, created_at: new Date(), diff --git a/src/background-jobs/notifications/reply/reply.service.spec.ts b/src/background-jobs/notifications/reply/reply.service.spec.ts index a8cb6d00..ad1fae92 100644 --- a/src/background-jobs/notifications/reply/reply.service.spec.ts +++ b/src/background-jobs/notifications/reply/reply.service.spec.ts @@ -142,5 +142,63 @@ describe('ReplyJobService', () => { const result = await service.queueReplyNotification(mock_reply_dto); expect(result).toEqual({ success: false, error: 'Queue error' }); }); + + it('should queue reply with complete conversation context', async () => { + const dto_with_context: ReplyBackGroundNotificationJobDTO = { + reply_to: 'user-123', + replied_by: 'user-456', + tweet: { + tweet_id: 'reply-tweet-123', + content: 'This is a reply', + user: { id: 'user-456', username: 'replier' }, + } as any, + reply_tweet_id: 'reply-tweet-123', + original_tweet_id: 'original-tweet-123', + conversation_id: 'conversation-123', + }; + + const result = await service.queueReplyNotification(dto_with_context); + + expect(mock_queue.add).toHaveBeenCalledWith( + JOB_NAMES.NOTIFICATION.REPLY, + dto_with_context, + expect.any(Object) + ); + expect(result.success).toBe(true); + }); + + it('should handle reply to reply (nested conversations)', async () => { + const nested_reply_dto: ReplyBackGroundNotificationJobDTO = { + reply_to: 'user-789', + replied_by: 'user-456', + tweet: {} as any, + reply_tweet_id: 'reply-tweet-456', + original_tweet_id: 'original-tweet-123', + conversation_id: 'conversation-123', + }; + + await service.queueReplyNotification(nested_reply_dto); + + expect(mock_queue.add).toHaveBeenCalledWith( + JOB_NAMES.NOTIFICATION.REPLY, + nested_reply_dto, + expect.any(Object) + ); + }); + + it('should handle action parameter for removing replies', async () => { + const remove_reply_dto = { + ...mock_reply_dto, + action: 'remove' as const, + }; + + await service.queueReplyNotification(remove_reply_dto); + + expect(mock_queue.add).toHaveBeenCalledWith( + JOB_NAMES.NOTIFICATION.REPLY, + remove_reply_dto, + expect.any(Object) + ); + }); }); }); diff --git a/src/background-jobs/notifications/reply/reply.service.ts b/src/background-jobs/notifications/reply/reply.service.ts index d174dba5..98e15432 100644 --- a/src/background-jobs/notifications/reply/reply.service.ts +++ b/src/background-jobs/notifications/reply/reply.service.ts @@ -12,7 +12,7 @@ import type { Queue } from 'bull'; @Injectable() export class ReplyJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private reply_queue: Queue) { + constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private readonly reply_queue: Queue) { super(reply_queue, JOB_NAMES.NOTIFICATION.REPLY, JOB_PRIORITIES.HIGH, JOB_DELAYS.IMMEDIATE); } diff --git a/src/background-jobs/notifications/repost/repost.processor.spec.ts b/src/background-jobs/notifications/repost/repost.processor.spec.ts index 1ad2252d..e50622d2 100644 --- a/src/background-jobs/notifications/repost/repost.processor.spec.ts +++ b/src/background-jobs/notifications/repost/repost.processor.spec.ts @@ -173,7 +173,7 @@ describe('RepostProcessor', () => { }; tweet_repository.findOne.mockResolvedValue(mock_tweet_entity as Tweet); - notifications_service.removeRepostNotification.mockResolvedValue(true); + notifications_service.removeRepostNotification.mockResolvedValue('notification-id-123'); const job = mock_job({ repost_to: 'repost-to-id', @@ -196,16 +196,18 @@ describe('RepostProcessor', () => { expect(notifications_service.sendNotificationOnly).toHaveBeenCalledWith( NotificationType.REPOST, 'actual-owner-id', - expect.objectContaining({ - reposted_by: 'reposter-id', - }) + { + id: 'notification-id-123', + ...job.data, + action: 'remove', + } ); }); it('should use repost_to when tweet entity not found', async () => { const logger_spy = jest.spyOn(processor['logger'], 'warn'); tweet_repository.findOne.mockResolvedValue(null); - notifications_service.removeRepostNotification.mockResolvedValue(true); + notifications_service.removeRepostNotification.mockResolvedValue('notification-id-123'); const job = mock_job({ repost_to: 'repost-to-id', diff --git a/src/background-jobs/notifications/repost/repost.processor.ts b/src/background-jobs/notifications/repost/repost.processor.ts index 4bb26d11..69b9fc53 100644 --- a/src/background-jobs/notifications/repost/repost.processor.ts +++ b/src/background-jobs/notifications/repost/repost.processor.ts @@ -38,23 +38,23 @@ export class RepostProcessor { if (tweet_entity) tweet_owner_id = tweet_entity.user_id; else this.logger.warn(`Tweet with ID ${tweet_id} not found.`); - let was_deleted = false; + let notification_id: string | null = null; if (tweet_id) { - was_deleted = await this.notifications_service.removeRepostNotification( + notification_id = await this.notifications_service.removeRepostNotification( tweet_owner_id, tweet_id, reposted_by ); } - if (was_deleted) { + if (notification_id) { this.notifications_service.sendNotificationOnly( NotificationType.REPOST, tweet_owner_id, { - type: NotificationType.REPOST, + id: notification_id, ...job.data, - reposted_by, + action: 'remove', } ); } diff --git a/src/background-jobs/notifications/repost/repost.service.spec.ts b/src/background-jobs/notifications/repost/repost.service.spec.ts index a2a99131..a2b2edf7 100644 --- a/src/background-jobs/notifications/repost/repost.service.spec.ts +++ b/src/background-jobs/notifications/repost/repost.service.spec.ts @@ -176,5 +176,82 @@ describe('RepostJobService', () => { expect(result).toEqual({ success: true, job_id: 'job-missing' }); }); + + it('should handle repost with complete tweet data', async () => { + const dto: RepostBackGroundNotificationJobDTO = { + repost_to: 'author-full', + reposted_by: 'reposter-full', + tweet_id: 'tweet-full', + tweet: { + tweet_id: 'tweet-full', + content: 'Complete tweet with all data', + user: { id: 'author-full', username: 'author' }, + created_at: new Date(), + likes_count: 10, + reposts_count: 5, + } as any, + action: 'add', + }; + + const mock_job = { id: 'job-full' }; + queue.add.mockResolvedValue(mock_job as any); + + const result = await service.queueRepostNotification(dto); + + expect(result).toEqual({ success: true, job_id: 'job-full' }); + }); + + it('should apply default job configuration', async () => { + const dto: RepostBackGroundNotificationJobDTO = { + repost_to: 'author-defaults', + reposted_by: 'reposter-defaults', + tweet_id: 'tweet-defaults', + action: 'add', + }; + + const mock_job = { id: 'job-defaults' }; + queue.add.mockResolvedValue(mock_job as any); + + const result = await service.queueRepostNotification(dto); + + expect(queue.add).toHaveBeenCalledWith( + expect.any(String), + dto, + expect.objectContaining({ + attempts: 3, + backoff: expect.any(Object), + removeOnComplete: 10, + removeOnFail: 5, + }) + ); + expect(result.success).toBe(true); + }); + + it('should handle rapid repost/unrepost cycles', async () => { + const add_dto: RepostBackGroundNotificationJobDTO = { + repost_to: 'author-cycle', + reposted_by: 'reposter-cycle', + tweet_id: 'tweet-cycle', + action: 'add', + }; + + const remove_dto: RepostBackGroundNotificationJobDTO = { + repost_to: 'author-cycle', + reposted_by: 'reposter-cycle', + tweet_id: 'tweet-cycle', + action: 'remove', + }; + + queue.add + .mockResolvedValueOnce({ id: 'job-add' } as any) + .mockResolvedValueOnce({ id: 'job-remove' } as any); + + const add_result = await service.queueRepostNotification(add_dto); + const remove_result = await service.queueRepostNotification(remove_dto); + + expect(add_result).toEqual({ success: true, job_id: 'job-add' }); + expect(remove_result).toEqual({ success: true, job_id: 'job-remove' }); + expect(queue.add).toHaveBeenCalledTimes(2); + }); }); }); diff --git a/src/background-jobs/notifications/repost/repost.service.ts b/src/background-jobs/notifications/repost/repost.service.ts index eaf8868b..76dfc2ff 100644 --- a/src/background-jobs/notifications/repost/repost.service.ts +++ b/src/background-jobs/notifications/repost/repost.service.ts @@ -12,7 +12,7 @@ import { RepostBackGroundNotificationJobDTO } from './repost.dto'; @Injectable() export class RepostJobService extends BackgroundJobsService { - constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private repost_queue: Queue) { + constructor(@InjectQueue(QUEUE_NAMES.NOTIFICATION) private readonly repost_queue: Queue) { super( repost_queue, JOB_NAMES.NOTIFICATION.REPOST, diff --git a/src/background-jobs/timeline/timeline.cron.ts b/src/background-jobs/timeline/timeline.cron.ts new file mode 100644 index 00000000..8b5a7b91 --- /dev/null +++ b/src/background-jobs/timeline/timeline.cron.ts @@ -0,0 +1,14 @@ +import { Injectable } from '@nestjs/common'; +import { Cron, CronExpression } from '@nestjs/schedule'; +import { CleanupOldTweetsJobService } from './timeline.service'; + +@Injectable() +export class TimelineCron { + constructor(private readonly cleanup_old_tweets_job_service: CleanupOldTweetsJobService) {} + + @Cron(CronExpression.EVERY_DAY_AT_2AM) + async handleDailyCleanup() { + console.log('[Timeline Cron] Starting daily cleanup of old tweets'); + await this.cleanup_old_tweets_job_service.queueCleanupOldTweets({}); + } +} diff --git a/src/background-jobs/timeline/timeline.dto.ts b/src/background-jobs/timeline/timeline.dto.ts new file mode 100644 index 00000000..71260404 --- /dev/null +++ b/src/background-jobs/timeline/timeline.dto.ts @@ -0,0 +1,12 @@ +export interface IInitTimelineQueueJobDTO { + user_id: string; +} + +export interface IRefillTimelineQueueJobDTO { + user_id: string; + refill_count: number; +} + +export interface ICleanupOldTweetsJobDTO { + user_id?: string; // If not provided, cleanup for all users +} diff --git a/src/background-jobs/timeline/timeline.processor.spec.ts b/src/background-jobs/timeline/timeline.processor.spec.ts new file mode 100644 index 00000000..7318cef2 --- /dev/null +++ b/src/background-jobs/timeline/timeline.processor.spec.ts @@ -0,0 +1,329 @@ +import { Test, TestingModule } from '@nestjs/testing'; +import { TimelineProcessor } from './timeline.processor'; +import { TimelineRedisService } from 'src/timeline/services/timeline-redis.service'; +import { TimelineCandidatesService } from 'src/timeline/services/timeline-candidates.service'; +import { ConfigService } from '@nestjs/config'; +import { getRepositoryToken } from '@nestjs/typeorm'; +import { Repository } from 'typeorm'; +import { User } from 'src/user/entities/user.entity'; +import type { Job } from 'bull'; +import { + ICleanupOldTweetsJobDTO, + IInitTimelineQueueJobDTO, + IRefillTimelineQueueJobDTO, +} from './timeline.dto'; + +describe('TimelineProcessor', () => { + let processor: TimelineProcessor; + let timeline_redis_service: jest.Mocked; + let timeline_candidates_service: jest.Mocked; + let user_repository: jest.Mocked>; + let config_service: jest.Mocked; + + const mock_user_id = 'user-123'; + const mock_candidates = [ + { tweet_id: 'tweet-1', created_at: new Date('2024-01-01'), category_id: 1, score: 10 }, + { tweet_id: 'tweet-2', created_at: new Date('2024-01-02'), category_id: 1, score: 8 }, + { tweet_id: 'tweet-3', created_at: new Date('2024-01-03'), category_id: 2, score: 5 }, + ]; + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + TimelineProcessor, + { + provide: TimelineRedisService, + useValue: { + getTweetIdsInQueue: jest.fn(), + initializeQueue: jest.fn(), + addToQueue: jest.fn(), + trimQueue: jest.fn(), + removeOldTweets: jest.fn(), + getQueueSize: jest.fn(), + }, + }, + { + provide: TimelineCandidatesService, + useValue: { + getCandidates: jest.fn(), + }, + }, + { + provide: ConfigService, + useValue: { + get: jest.fn((key, default_value) => { + if (key === 'TIMELINE_QUEUE_SIZE') return 100; + if (key === 'TIMELINE_TWEET_FRESHNESS_DAYS') return 7; + if (key === 'TIMELINE_MAX_QUEUE_SIZE') return 200; + return default_value; + }), + }, + }, + { + provide: getRepositoryToken(User), + useValue: { + find: jest.fn(), + }, + }, + ], + }).compile(); + + processor = module.get(TimelineProcessor); + timeline_redis_service = module.get(TimelineRedisService); + timeline_candidates_service = module.get(TimelineCandidatesService); + user_repository = module.get(getRepositoryToken(User)); + config_service = module.get(ConfigService); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should be defined', () => { + expect(processor).toBeDefined(); + }); + + describe('handleInitQueue', () => { + it('should initialize queue for user', async () => { + const job: Job = { + data: { user_id: mock_user_id }, + } as any; + + timeline_redis_service.getTweetIdsInQueue.mockResolvedValue(new Set()); + timeline_candidates_service.getCandidates.mockResolvedValue(mock_candidates); + timeline_redis_service.initializeQueue.mockResolvedValue(3); + + await processor.handleInitQueue(job); + + expect(timeline_redis_service.getTweetIdsInQueue).toHaveBeenCalledWith(mock_user_id); + expect(timeline_candidates_service.getCandidates).toHaveBeenCalledWith( + mock_user_id, + expect.any(Set), + 100 + ); + expect(timeline_redis_service.initializeQueue).toHaveBeenCalledWith( + mock_user_id, + expect.arrayContaining([ + expect.objectContaining({ tweet_id: 'tweet-1' }), + expect.objectContaining({ tweet_id: 'tweet-2' }), + expect.objectContaining({ tweet_id: 'tweet-3' }), + ]) + ); + }); + + it('should handle no candidates found', async () => { + const job: Job = { + data: { user_id: mock_user_id }, + } as any; + + timeline_redis_service.getTweetIdsInQueue.mockResolvedValue(new Set()); + timeline_candidates_service.getCandidates.mockResolvedValue([]); + + await processor.handleInitQueue(job); + + expect(timeline_redis_service.initializeQueue).not.toHaveBeenCalled(); + }); + + it('should propagate errors', async () => { + const job: Job = { + data: { user_id: mock_user_id }, + } as any; + + const error = new Error('Redis connection failed'); + timeline_redis_service.getTweetIdsInQueue.mockRejectedValue(error); + + await expect(processor.handleInitQueue(job)).rejects.toThrow('Redis connection failed'); + }); + }); + + describe('handleRefillQueue', () => { + it('should refill queue with new candidates', async () => { + const job: Job = { + data: { user_id: mock_user_id, refill_count: 20 }, + } as any; + + timeline_redis_service.getTweetIdsInQueue.mockResolvedValue( + new Set(['existing-tweet']) + ); + timeline_candidates_service.getCandidates.mockResolvedValue(mock_candidates); + timeline_redis_service.addToQueue.mockResolvedValue(3); + timeline_redis_service.getQueueSize.mockResolvedValue(150); // Less than max, no trim needed + + await processor.handleRefillQueue(job); + + expect(timeline_candidates_service.getCandidates).toHaveBeenCalledWith( + mock_user_id, + expect.any(Set), + 20 + ); + expect(timeline_redis_service.addToQueue).toHaveBeenCalledWith( + mock_user_id, + expect.any(Array) + ); + // Should not trim since queue size < max + expect(timeline_redis_service.trimQueue).not.toHaveBeenCalled(); + }); + + it('should exclude existing tweets when refilling', async () => { + const job: Job = { + data: { user_id: mock_user_id, refill_count: 20 }, + } as any; + + const existing_ids = new Set(['tweet-1', 'tweet-2']); + timeline_redis_service.getTweetIdsInQueue.mockResolvedValue(existing_ids); + timeline_candidates_service.getCandidates.mockResolvedValue(mock_candidates); + timeline_redis_service.addToQueue.mockResolvedValue(3); + timeline_redis_service.trimQueue.mockResolvedValue(100); + + await processor.handleRefillQueue(job); + + expect(timeline_candidates_service.getCandidates).toHaveBeenCalledWith( + mock_user_id, + existing_ids, + 20 + ); + }); + + it('should handle no new candidates found', async () => { + const job: Job = { + data: { user_id: mock_user_id, refill_count: 20 }, + } as any; + + timeline_redis_service.getTweetIdsInQueue.mockResolvedValue(new Set()); + timeline_candidates_service.getCandidates.mockResolvedValue([]); + + await processor.handleRefillQueue(job); + + expect(timeline_redis_service.addToQueue).not.toHaveBeenCalled(); + expect(timeline_redis_service.trimQueue).not.toHaveBeenCalled(); + }); + + it('should trim queue after adding tweets when size exceeds max', async () => { + const job: Job = { + data: { user_id: mock_user_id, refill_count: 20 }, + } as any; + + timeline_redis_service.getTweetIdsInQueue.mockResolvedValue(new Set()); + timeline_candidates_service.getCandidates.mockResolvedValue(mock_candidates); + timeline_redis_service.addToQueue.mockResolvedValue(3); + timeline_redis_service.getQueueSize.mockResolvedValue(250); // Exceeds max of 200 + timeline_redis_service.trimQueue.mockResolvedValue(50); + + await processor.handleRefillQueue(job); + + expect(timeline_redis_service.trimQueue).toHaveBeenCalledWith(mock_user_id, 200); + }); + + it('should propagate errors', async () => { + const job: Job = { + data: { user_id: mock_user_id, refill_count: 20 }, + } as any; + + const error = new Error('Database error'); + timeline_redis_service.getTweetIdsInQueue.mockRejectedValue(error); + + await expect(processor.handleRefillQueue(job)).rejects.toThrow('Database error'); + }); + }); + + describe('handleCleanupOldTweets', () => { + it('should cleanup old tweets for all users', async () => { + const job: Job = { + data: {}, + } as any; + + const mock_users = [ + { id: 'user-1' } as User, + { id: 'user-2' } as User, + { id: 'user-3' } as User, + ]; + user_repository.find.mockResolvedValue(mock_users); + timeline_redis_service.removeOldTweets.mockResolvedValue(5); + + await processor.handleCleanupOldTweets(job); + + expect(user_repository.find).toHaveBeenCalledWith({ + select: ['id'], + where: { deleted_at: null }, + }); + expect(timeline_redis_service.removeOldTweets).toHaveBeenCalledTimes(3); + expect(timeline_redis_service.removeOldTweets).toHaveBeenCalledWith( + 'user-1', + expect.any(String) + ); + }); + + it('should calculate correct cutoff date', async () => { + const job: Job = { + data: {}, + } as any; + + const mock_users = [{ id: 'user-1' } as User]; + user_repository.find.mockResolvedValue(mock_users); + timeline_redis_service.removeOldTweets.mockResolvedValue(0); + + const now = new Date(); + await processor.handleCleanupOldTweets(job); + + const call_args = timeline_redis_service.removeOldTweets.mock.calls[0]; + const cutoff_timestamp = call_args[1]; + + // Verify cutoff timestamp is approximately 7 days ago + const cutoff_date = new Date(cutoff_timestamp); + const expected_cutoff = new Date(now); + expected_cutoff.setDate(expected_cutoff.getDate() - 7); + + const diff_hours = Math.abs(cutoff_date.getTime() - expected_cutoff.getTime()) / 36e5; + expect(diff_hours).toBeLessThan(1); // Within 1 hour tolerance + }); + + it('should handle empty user list', async () => { + const job: Job = { + data: {}, + } as any; + + user_repository.find.mockResolvedValue([]); + + await processor.handleCleanupOldTweets(job); + + expect(timeline_redis_service.removeOldTweets).not.toHaveBeenCalled(); + }); + + it('should continue on individual user errors', async () => { + const job: Job = { + data: {}, + } as any; + + const mock_users = [ + { id: 'user-1' } as User, + { id: 'user-2' } as User, + { id: 'user-3' } as User, + ]; + user_repository.find.mockResolvedValue(mock_users); + + timeline_redis_service.removeOldTweets + .mockResolvedValueOnce(5) // user-1 success + .mockRejectedValueOnce(new Error('Redis error')) // user-2 fails + .mockResolvedValueOnce(3); // user-3 success + + // Should throw because the implementation throws on error + await expect(processor.handleCleanupOldTweets(job)).rejects.toThrow(); + + // Only 2 calls because it throws on the second user's error + expect(timeline_redis_service.removeOldTweets).toHaveBeenCalledTimes(2); + }); + + it('should propagate errors from user repository', async () => { + const job: Job = { + data: {}, + } as any; + + const error = new Error('Database connection failed'); + user_repository.find.mockRejectedValue(error); + + await expect(processor.handleCleanupOldTweets(job)).rejects.toThrow( + 'Database connection failed' + ); + }); + }); +}); diff --git a/src/background-jobs/timeline/timeline.processor.ts b/src/background-jobs/timeline/timeline.processor.ts new file mode 100644 index 00000000..2fcc7dd9 --- /dev/null +++ b/src/background-jobs/timeline/timeline.processor.ts @@ -0,0 +1,173 @@ +import { Process, Processor } from '@nestjs/bull'; +import type { Job } from 'bull'; +import { JOB_NAMES, QUEUE_NAMES } from '../constants/queue.constants'; +import type { + ICleanupOldTweetsJobDTO, + IInitTimelineQueueJobDTO, + IRefillTimelineQueueJobDTO, +} from './timeline.dto'; +import { TimelineRedisService } from 'src/timeline/services/timeline-redis.service'; +import { TimelineCandidatesService } from 'src/timeline/services/timeline-candidates.service'; +import { ConfigService } from '@nestjs/config'; +import { InjectRepository } from '@nestjs/typeorm'; +import { Repository } from 'typeorm'; +import { User } from 'src/user/entities/user.entity'; + +@Processor(QUEUE_NAMES.TIMELINE) +export class TimelineProcessor { + private readonly queue_size: number; + private readonly max_queue_size: number; + private readonly tweet_freshness_days: number; + + constructor( + private readonly timeline_redis_service: TimelineRedisService, + private readonly timeline_candidates_service: TimelineCandidatesService, + private readonly config_service: ConfigService, + @InjectRepository(User) + private readonly user_repository: Repository + ) { + this.queue_size = this.config_service.get('TIMELINE_QUEUE_SIZE', 100); + this.tweet_freshness_days = this.config_service.get( + 'TIMELINE_TWEET_FRESHNESS_DAYS', + 7 + ); + this.max_queue_size = this.config_service.get('TIMELINE_MAX_QUEUE_SIZE', 200); + } + + @Process(JOB_NAMES.TIMELINE.INIT_QUEUE) + async handleInitQueue(job: Job) { + const { user_id } = job.data; + + try { + console.log(`[Timeline] Initializing queue for user ${user_id}`); + + // Get existing tweet IDs in queue (should be empty for init, but check anyway) + const existing_tweet_ids = + await this.timeline_redis_service.getTweetIdsInQueue(user_id); + + // Get candidates + const candidates = await this.timeline_candidates_service.getCandidates( + user_id, + existing_tweet_ids, + this.queue_size + ); + + if (candidates.length === 0) { + console.log(`[Timeline] No candidates found for user ${user_id}`); + return; + } + + // Initialize queue with candidates + const tweets = candidates.map((c) => ({ + tweet_id: c.tweet_id, + created_at: c.created_at.toISOString(), + })); + + const queue_size = await this.timeline_redis_service.initializeQueue(user_id, tweets); + + console.log( + `[Timeline] Initialized queue for user ${user_id} with ${queue_size} tweets` + ); + } catch (error) { + console.error(`[Timeline] Error initializing queue for user ${user_id}:`, error); + throw error; + } + } + + @Process(JOB_NAMES.TIMELINE.REFILL_QUEUE) + async handleRefillQueue(job: Job) { + const { user_id, refill_count } = job.data; + + try { + console.log( + `[Timeline] Refilling queue for user ${user_id} with ${refill_count} tweets` + ); + + // Get existing tweet IDs in queue to avoid duplicates + const existing_tweet_ids = + await this.timeline_redis_service.getTweetIdsInQueue(user_id); + + // Get new candidates + const candidates = await this.timeline_candidates_service.getCandidates( + user_id, + existing_tweet_ids, + refill_count + ); + + if (candidates.length === 0) { + console.log(`[Timeline] No new candidates found for user ${user_id}`); + return; + } + + // Add to queue + const tweets = candidates.map((c) => ({ + tweet_id: c.tweet_id, + created_at: c.created_at.toISOString(), + })); + + const added_count = await this.timeline_redis_service.addToQueue(user_id, tweets); + + console.log(`[Timeline] Added ${added_count} tweets to queue for user ${user_id}`); + + // Trim queue if it exceeds max size + const current_size = await this.timeline_redis_service.getQueueSize(user_id); + if (current_size > this.max_queue_size) { + const removed = await this.timeline_redis_service.trimQueue( + user_id, + this.max_queue_size + ); + console.log( + `[Timeline] Queue size ${current_size} exceeded max ${this.max_queue_size} for user ${user_id}, trimmed ${removed} tweets` + ); + } + } catch (error) { + console.error(`[Timeline] Error refilling queue for user ${user_id}:`, error); + throw error; + } + } + + @Process(JOB_NAMES.TIMELINE.CLEANUP_OLD_TWEETS) + async handleCleanupOldTweets(job: Job) { + const { user_id } = job.data; + + try { + // Calculate cutoff timestamp + const cutoff_date = new Date(); + cutoff_date.setDate(cutoff_date.getDate() - this.tweet_freshness_days); + const cutoff_timestamp = cutoff_date.toISOString(); + + if (user_id) { + // Cleanup for specific user + console.log(`[Timeline] Cleaning up old tweets for user ${user_id}`); + const removed = await this.timeline_redis_service.removeOldTweets( + user_id, + cutoff_timestamp + ); + console.log(`[Timeline] Removed ${removed} old tweets for user ${user_id}`); + } else { + // Cleanup for all users + console.log(`[Timeline] Cleaning up old tweets for all users`); + + // Get all users (you might want to paginate this for large databases) + const users = await this.user_repository.find({ + select: ['id'], + where: { deleted_at: null as any }, + }); + + let total_removed = 0; + for (const user of users) { + const removed = await this.timeline_redis_service.removeOldTweets( + user.id, + cutoff_timestamp + ); + total_removed += removed; + } + + console.log(`[Timeline] Removed ${total_removed} old tweets across all users`); + } + } catch (error) { + console.error('[Timeline] Error cleaning up old tweets:', error); + throw error; + } + } +} diff --git a/src/background-jobs/timeline/timeline.service.ts b/src/background-jobs/timeline/timeline.service.ts new file mode 100644 index 00000000..bb34dcb9 --- /dev/null +++ b/src/background-jobs/timeline/timeline.service.ts @@ -0,0 +1,82 @@ +import { Injectable } from '@nestjs/common'; +import { InjectQueue } from '@nestjs/bull'; +import type { Queue } from 'bull'; +import { BackgroundJobsService } from 'src/background-jobs/background-jobs'; +import { + JOB_DELAYS, + JOB_NAMES, + JOB_PRIORITIES, + QUEUE_NAMES, +} from 'src/background-jobs/constants/queue.constants'; +import { + ICleanupOldTweetsJobDTO, + IInitTimelineQueueJobDTO, + IRefillTimelineQueueJobDTO, +} from './timeline.dto'; + +@Injectable() +export class InitTimelineQueueJobService extends BackgroundJobsService { + constructor(@InjectQueue(QUEUE_NAMES.TIMELINE) private timeline_queue: Queue) { + super( + timeline_queue, + JOB_NAMES.TIMELINE.INIT_QUEUE, + JOB_PRIORITIES.MEDIUM, + JOB_DELAYS.IMMEDIATE + ); + } + + async queueInitTimelineQueue(dto: IInitTimelineQueueJobDTO, priority?: number, delay?: number) { + return await this.queueJob( + dto, + priority ?? this.priority, + delay ?? this.delay, + 'Failed to queue init timeline queue job:' + ); + } +} + +@Injectable() +export class RefillTimelineQueueJobService extends BackgroundJobsService { + constructor(@InjectQueue(QUEUE_NAMES.TIMELINE) private timeline_queue: Queue) { + super( + timeline_queue, + JOB_NAMES.TIMELINE.REFILL_QUEUE, + JOB_PRIORITIES.HIGH, + JOB_DELAYS.IMMEDIATE + ); + } + + async queueRefillTimelineQueue( + dto: IRefillTimelineQueueJobDTO, + priority?: number, + delay?: number + ) { + return await this.queueJob( + dto, + priority ?? this.priority, + delay ?? this.delay, + 'Failed to queue refill timeline queue job:' + ); + } +} + +@Injectable() +export class CleanupOldTweetsJobService extends BackgroundJobsService { + constructor(@InjectQueue(QUEUE_NAMES.TIMELINE) private timeline_queue: Queue) { + super( + timeline_queue, + JOB_NAMES.TIMELINE.CLEANUP_OLD_TWEETS, + JOB_PRIORITIES.LOW, + JOB_DELAYS.IMMEDIATE + ); + } + + async queueCleanupOldTweets(dto: ICleanupOldTweetsJobDTO, priority?: number, delay?: number) { + return await this.queueJob( + dto, + priority ?? this.priority, + delay ?? this.delay, + 'Failed to queue cleanup old tweets job:' + ); + } +} diff --git a/src/chat/chat.controller.ts b/src/chat/chat.controller.ts index d8f7f6c4..0b724e32 100644 --- a/src/chat/chat.controller.ts +++ b/src/chat/chat.controller.ts @@ -46,7 +46,7 @@ export class ChatController { @Post() async createChat(@Body() create_chat_dto: CreateChatDto, @GetUserId() user_id: string) { try { - return this.chat_service.createChat(user_id, create_chat_dto); + return await this.chat_service.createChat(user_id, create_chat_dto); } catch (error) { console.error('Error in ChatController.createChat:', error); throw error; diff --git a/src/chat/chat.repository.ts b/src/chat/chat.repository.ts index 7c4c10d1..62ef8f43 100644 --- a/src/chat/chat.repository.ts +++ b/src/chat/chat.repository.ts @@ -16,10 +16,10 @@ import { EncryptionService } from 'src/shared/services/encryption/encryption.ser @Injectable() export class ChatRepository extends Repository { constructor( - private data_source: DataSource, - private pagination_service: PaginationService, - private user_repository: UserRepository, - private encryption_service: EncryptionService + private readonly data_source: DataSource, + private readonly pagination_service: PaginationService, + private readonly user_repository: UserRepository, + private readonly encryption_service: EncryptionService ) { super(Chat, data_source.createEntityManager()); } diff --git a/src/chat/chat.service.ts b/src/chat/chat.service.ts index f99b0047..bcc3074d 100644 --- a/src/chat/chat.service.ts +++ b/src/chat/chat.service.ts @@ -21,7 +21,7 @@ export class ChatService { async createChat(user_id: string, dto: CreateChatDto) { try { - return this.chat_repository.createChat(user_id, dto); + return await this.chat_repository.createChat(user_id, dto); } catch (error) { console.error('Error in createChat:', error); throw error; diff --git a/src/chat/entities/chat.entity.ts b/src/chat/entities/chat.entity.ts index 99026095..8ebe6353 100644 --- a/src/chat/entities/chat.entity.ts +++ b/src/chat/entities/chat.entity.ts @@ -16,14 +16,14 @@ export class Chat { @PrimaryGeneratedColumn('uuid') id: string; - @ManyToOne(() => User) + @ManyToOne(() => User, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'user1_id' }) user1: User; @Column() user1_id: string; - @ManyToOne(() => User) + @ManyToOne(() => User, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'user2_id' }) user2: User; diff --git a/src/communication/email.service.ts b/src/communication/email.service.ts index c3fab85c..9d677799 100644 --- a/src/communication/email.service.ts +++ b/src/communication/email.service.ts @@ -5,9 +5,9 @@ import { SendEmailDto } from './dto/send-email.dto'; @Injectable() export class EmailService { - private mail_transport: Transporter; + private readonly mail_transport: Transporter; - constructor(private config_service: ConfigService) { + constructor(private readonly config_service: ConfigService) { this.mail_transport = createTransport({ host: this.config_service.get('EMAIL_HOST', 'smtp.gmail.com'), port: this.config_service.get('EMAIL_PORT', 587), diff --git a/src/constants/variables.ts b/src/constants/variables.ts index bbc6695f..1fe5dde8 100644 --- a/src/constants/variables.ts +++ b/src/constants/variables.ts @@ -20,6 +20,7 @@ export const ALLOWED_IMAGE_MIME_TYPES = [ 'image/tiff', 'image/svg+xml', 'image/x-icon', + 'image/heic', ] as const; export const MAX_IMAGE_FILE_SIZE = 5 * 1024 * 1024; // 5MB @@ -29,6 +30,8 @@ export const ALLOWED_VOICE_MIME_TYPES = [ 'audio/wav', 'audio/ogg', 'audio/mp4', + 'audio/m4a', + 'audio/x-m4a', 'audio/webm', ] as const; export const MAX_VOICE_FILE_SIZE = 5 * 1024 * 1024; // 5MB @@ -710,3 +713,324 @@ Oui ŲŠØ§ ØšŲ„ŲŠ Oui , Oui ŲŠØ§ ØšŲ„ŲŠ Oui , Oui ŲŠØ§ ØšŲ„ŲŠ Oui , Oui ŲŠØ§ ØšŲ„ { liker_index: 2, liked_user_index: 1, tweet_index: 18 }, ]; } + +// ------------------------- Fake Data for Testing Trends ------------------------- // + +export class TrendDataConstants { + static readonly TREND_BOT = { + email: 'trend@yapper.com', + password: 'Test#242', + name: 'Trend Bot', + username: 'trendbot_', + birth_date: new Date('2004-09-22'), + language: 'en' as const, + }; + + static readonly SPORTS_TRENDS = [ + '#football', + '#soccer', + '#messi', + '#ronaldo', + '#cr7', + '#fifa', + '#worldcup', + '#worldcup2026', + '#premierleague', + '#laliga', + '#seriea', + '#bundesliga', + '#ucl', + '#championsleague', + '#mbappe', + '#neymar', + '#haaland', + '#salah', + '#manutd', + '#mancity', + '#liverpool', + '#realmadrid', + '#barcelona', + '#bayern', + '#psg', + '#juventus', + '#alhilal', + '#alnassr', + '#goat', + '#ballondor', + '#leomessi', + '#cristiano', + '#argentina', + '#portugal', + '#brazil', + '#england', + '#spain', + '#germany', + '#elclasico', + '#derby', + '#goals', + '#highlights', + '#matchday', + '#hattrick', + '#skills', + '#freekick', + '#var', + '#ynwa', + '#halamadrid', + '#forcabarca', + '#miasanmia', + '#forzajuve', + '#topbins', + '#rabona', + '#bicyclekick', + '#panenka', + '#tikitaka', + '#ultras', + '#gameday', + '#comeback', + '#transfernews', + '#transferwindow', + '#epl', + '#sports', + '#euro2024', + '#copaamerica', + '#afcon', + + '#ŲƒØąØŠ_Ø§Ų„Ų‚Ø¯Ų…', + '#ŲƒŲˆØąØŠ', + '#Ų…ŲŠØŗŲŠ', + '#ØąŲˆŲ†Ø§Ų„Ø¯Ųˆ', + '#ŲƒØąŲŠØŗØĒŲŠØ§Ų†Ųˆ', + '#ŲŲŠŲØ§', + '#ŲƒØŖØŗ_Ø§Ų„ØšØ§Ų„Ų…', + '#Ø§Ų„Ø¯ŲˆØąŲŠ_Ø§Ų„ØĨŲ†ØŦŲ„ŲŠØ˛ŲŠ', + '#Ø§Ų„Ų„ŲŠØēا', + '#Ø¯ŲˆØąŲŠ_ØŖØ¨ØˇØ§Ų„_ØŖŲˆØąŲˆØ¨Ø§', + '#Ų…Ø¨Ø§Ø¨ŲŠ', + '#Ų†ŲŠŲ…Ø§Øą', + '#Ų‡Ø§Ų„Ø§Ų†Ø¯', + '#Ų…Ø­Ų…Ø¯_ØĩŲ„Ø§Ø­', + '#Ų…Ø§Ų†Ø´ØŗØĒØą_ŲŠŲˆŲ†Ø§ŲŠØĒد', + '#Ų…Ø§Ų†Ø´ØŗØĒØą_ØŗŲŠØĒ؊', + '#Ų„ŲŠŲØąØ¨ŲˆŲ„', + '#ØąŲŠØ§Ų„_Ų…Ø¯ØąŲŠØ¯', + '#Ø¨ØąØ´Ų„ŲˆŲ†ØŠ', + '#Ø§Ų„Ų‡Ų„Ø§Ų„', + '#Ø§Ų„Ų†ØĩØą', + '#Ø§Ų„ŲƒØąØŠ_Ø§Ų„Ø°Ų‡Ø¨ŲŠØŠ', + '#ØŖŲ‡Ø¯Ø§Ų', + '#Ų…Ų„ØŽØĩ_Ø§Ų„Ų…Ø¨Ø§ØąØ§ØŠ', + '#Ø§Ų„ŲƒŲ„Ø§ØŗŲŠŲƒŲˆ', + '#Ų‡Ø§ØĒØąŲŠŲƒ', + '#Ø§Ų„ØĒØąØ§Øŗ', + '#ŲŠŲˆŲ…_Ø§Ų„Ų…Ø¨Ø§ØąØ§ØŠ', + '#ŲØ§Øą', + '#Ų…Ų‡Ø§ØąØ§ØĒ', + ]; + + static readonly NEWS_TRENDS = [ + '#news', + '#breaking', + '#breakingnews', + '#latestnews', + '#worldnews', + '#politics', + '#trending', + '#viral', + '#update', + '#live', + '#cnn', + '#aljazeera', + '#skynews', + '#foxnews', + '#reuters', + '#ap', + '#bloomberg', + '#cnbc', + '#economy', + '#war', + '#ukraine', + '#russia', + '#israel', + '#gaza', + '#palestine', + '#lebanon', + '#syria', + '#iran', + '#usa', + '#america', + '#election', + '#trump', + '#b ', + '#biden', + '#bitcoin', + '#crypto', + '#ai', + '#technology', + '#iphone', + '#tesla', + '#elonmusk', + '#climatechange', + '#weather', + '#earthquake', + '#flood', + '#protest', + '#riot', + '#terrorism', + '#attack', + '#shooting', + '#crime', + '#justice', + '#court', + '#celebrity', + '#hollywood', + '#royalfamily', + '#meghanmarkle', + '#taylorswift', + '#health', + '#covid', + '#vaccine', + '#pandemic', + '#science', + '#space', + '#nasa', + '#mars', + '#business', + '#stocks', + '#finance', + + '#ØŖØŽØ¨Ø§Øą', + '#ؚاØŦŲ„', + '#ØĸØŽØą_Ø§Ų„ØŖØŽØ¨Ø§Øą', + '#Ø§Ų„ØŖØŽØ¨Ø§Øą', + '#ØŗŲŠØ§ØŗØŠ', + '#ØĒØąŲ†Ø¯', + '#ŲŲŠØąŲˆØŗ_ŲƒŲˆØąŲˆŲ†Ø§', + '#Ø§Ų„Ø­ØąØ¨', + '#ØŖŲˆŲƒØąØ§Ų†ŲŠØ§', + '#ØąŲˆØŗŲŠØ§', + '#ØĨØŗØąØ§ØĻŲŠŲ„', + '#ØēØ˛ØŠ', + '#ŲŲ„ØŗØˇŲŠŲ†', + '#Ų„Ø¨Ų†Ø§Ų†', + '#ØĨŲŠØąØ§Ų†', + '#ØŖŲ…ØąŲŠŲƒØ§', + '#ØĒØąØ§Ų…Ø¨', + '#Ø¨Ø§ŲŠØ¯Ų†', + '#Ø¨ŲŠØĒŲƒŲˆŲŠŲ†', + '#Ø§Ų„ØšŲ…Ų„Ø§ØĒ_Ø§Ų„ØąŲ‚Ų…ŲŠØŠ', + '#Ø§Ų„Ø°ŲƒØ§ØĄ_Ø§Ų„Ø§ØĩØˇŲ†Ø§ØšŲŠ', + '#ØĒŲƒŲ†ŲˆŲ„ŲˆØŦŲŠØ§', + '#ØĒØēŲŠØą_Ø§Ų„Ų…Ų†Ø§ØŽ', + '#Ø˛Ų„Ø˛Ø§Ų„', + '#احØĒØŦاØŦاØĒ', + '#Ø§Ų‚ØĒØĩاد', + '#Ø¨ŲˆØąØĩØŠ', + '#ØŦØąŲŠŲ…ØŠ', + '#Ų…Ø­ŲƒŲ…ØŠ', + '#Ų…Ø´Ø§Ų‡ŲŠØą', + ]; + + static readonly ENTERTAINMENT_TRENDS = [ + // 70 English – Entertainment (Music, Movies, Series, Celebs) + '#entertainment', + '#music', + '#movies', + '#netflix', + '#hollywood', + '#bollywood', + '#kpop', + '#bts', + '#blackpink', + '#taylorswift', + '#billieeilish', + '#arianagrande', + '#badbunny', + '#theweeknd', + '#drake', + '#beyonce', + '#rihanna', + '#eminem', + '#oscars', + '#grammys', + '#goldenglobes', + '#cannes', + '#metgala', + '#marvel', + '#mcu', + '#dc', + '#strangerthings', + '#thelastofus', + '#houseofthedragon', + '#wednesday', + '#squidgame', + '#barbie', + '#oppenheimer', + '#dune', + '#avatar', + '#johnwick', + '#missionimpossible', + '#topgun', + '#celebrity', + '#redcarpet', + '#trailer', + '#premiere', + '#boxoffice', + '#concert', + '#tour', + '#album', + '#newmusic', + '#spotify', + '#applemusic', + '#tiktok', + '#viral', + '#dance', + '#remix', + '#liveperformance', + '#awardshow', + '#selenagomez', + '#justinbieber', + '#zendaya', + '#tomholland', + '#timothee', + '#dualipa', + '#oliviarodrigo', + '#harrypotter', + '#starwars', + '#onepiece', + '#joker', + '#wick', + '#fastandfurious', + + // 30 Arabic – Entertainment (highly trending in Arab world) + '#ØĒØąŲŲŠŲ‡', + '#Ų…ŲˆØŗŲŠŲ‚Ų‰', + '#Ø§ŲŲ„Ø§Ų…', + '#Ų†ØĒŲŲ„ŲŠŲƒØŗ', + '#Ø¨ŲˆŲ„ŲŠŲˆŲˆØ¯', + '#ŲƒŲŠØ¨ŲˆØ¨', + '#ØĒØ§ŲŠŲ„ŲˆØą_ØŗŲˆŲŠŲØĒ', + '#Ø¨Ų„Ø§ŲƒØ¨ŲŠŲ†Ųƒ', + '#بØĒØŗ', + '#Ų…Ø­Ų…Ø¯_ØąŲ…ØļØ§Ų†', + '#ØšŲ…ØąŲˆ_Ø¯ŲŠØ§Ø¨', + '#ØĒØ§Ų…Øą_Ø­ØŗŲ†ŲŠ', + '#Ų†ŲˆØ§Ų„_Ø§Ų„Ø˛ØēØ¨ŲŠ', + '#Ų†Ø§Ų†ØŗŲŠ_ØšØŦØąŲ…', + '#ØĨŲ„ŲŠØŗØ§', + '#ØąØ§Øēب_ØšŲ„Ø§Ų…ØŠ', + '#Ų…ØŗŲ„ØŗŲ„Ø§ØĒ', + '#Ø¯ØąØ§Ų…Ø§', + '#ŲƒŲˆŲ…ŲŠØ¯ŲŠØ§', + '#ØŗŲŠŲ†Ų…Ø§', + '#ØŦŲˆØ§ØĻØ˛', + '#Ų…Ų‡ØąØŦØ§Ų†_Ø§Ų„ØŦŲˆŲ†ØŠ', + '#Ų…Ų‡ØąØŦØ§Ų†_ŲƒØ§Ų†', + '#ØŖŲˆØŗŲƒØ§Øą', + '#ØĒØąŲ†Ø¯', + '#ŲŲŠŲ„Ų…', + '#ØŖØēŲ†ŲŠØŠ_ØŦØ¯ŲŠØ¯ØŠ', + '#Ø­ŲŲ„ØŠ', + '#ŲƒŲˆŲ†ØŗØąØĒ', + '#ØĒ؊؃ØĒ؈؃', + ]; +} diff --git a/src/databases/data-source.ts b/src/databases/data-source.ts index fc7e3464..d8202526 100644 --- a/src/databases/data-source.ts +++ b/src/databases/data-source.ts @@ -18,11 +18,13 @@ import { Hashtag } from '../tweets/entities/hashtags.entity'; import { UserPostsView } from '../tweets/entities/user-posts-view.entity'; import { UserBlocks, UserFollows, UserMutes } from '../user/entities'; import { UserInterests } from '../user/entities/user-interests.entity'; +import { UserTimelineCursor } from '../user/entities/user-timeline-cursor.entity'; import { TweetCategory } from '../tweets/entities/tweet-category.entity'; import { Chat } from '../chat/entities/chat.entity'; import { Message } from '../messages/entities/message.entity'; import { MessageReaction } from '../messages/entities/message-reaction.entity'; import { readFileSync } from 'fs'; +import { TweetHashtag } from '../tweets/entities/tweet-hashtag.entity'; config({ path: resolve(__dirname, '../../config/.env') }); @@ -76,6 +78,8 @@ const base_config: any = { Message, MessageReaction, TweetSummary, + TweetHashtag, + UserTimelineCursor, ], migrations: [__dirname + '/../migrations/*{.ts,.js}'], diff --git a/src/databases/migrations/1734100000000-CascadeDeleteRepliesAndQuotes.ts b/src/databases/migrations/1734100000000-CascadeDeleteRepliesAndQuotes.ts new file mode 100644 index 00000000..48c1094f --- /dev/null +++ b/src/databases/migrations/1734100000000-CascadeDeleteRepliesAndQuotes.ts @@ -0,0 +1,54 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class CascadeDeleteRepliesAndQuotes1734100000000 implements MigrationInterface { + name = 'CascadeDeleteRepliesAndQuotes1734100000000'; + + public async up(query_runner: QueryRunner): Promise { + // Create a function that cascades delete for reply and quote tweets + await query_runner.query(` + CREATE OR REPLACE FUNCTION cascade_delete_child_tweets() + RETURNS TRIGGER AS $$ + BEGIN + -- Delete all reply tweets when a parent tweet is deleted + DELETE FROM tweets + WHERE tweet_id IN ( + SELECT reply_tweet_id + FROM tweet_replies + WHERE original_tweet_id = OLD.tweet_id + ); + + -- Delete all quote tweets when a parent tweet is deleted + DELETE FROM tweets + WHERE tweet_id IN ( + SELECT quote_tweet_id + FROM tweet_quotes + WHERE original_tweet_id = OLD.tweet_id + ); + + RETURN OLD; + END; + $$ LANGUAGE plpgsql; + `); + + // Create trigger that runs BEFORE a tweet is deleted + // This ensures the relationships still exist when we query them + await query_runner.query(` + CREATE TRIGGER trigger_cascade_delete_child_tweets + BEFORE DELETE ON tweets + FOR EACH ROW + EXECUTE FUNCTION cascade_delete_child_tweets(); + `); + } + + public async down(query_runner: QueryRunner): Promise { + // Drop the trigger first + await query_runner.query(` + DROP TRIGGER IF EXISTS trigger_cascade_delete_child_tweets ON tweets; + `); + + // Drop the function + await query_runner.query(` + DROP FUNCTION IF EXISTS cascade_delete_child_tweets(); + `); + } +} diff --git a/src/databases/migrations/1734100000002-EnhanceCascadeDeleteWithHashtagsAndES.ts b/src/databases/migrations/1734100000002-EnhanceCascadeDeleteWithHashtagsAndES.ts new file mode 100644 index 00000000..46095165 --- /dev/null +++ b/src/databases/migrations/1734100000002-EnhanceCascadeDeleteWithHashtagsAndES.ts @@ -0,0 +1,137 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class EnhanceCascadeDeleteWithHashtagsAndES1734100000002 implements MigrationInterface { + name = 'EnhanceCascadeDeleteWithHashtagsAndES1736100000002'; + + public async up(query_runner: QueryRunner): Promise { + // Create a table to track deleted tweets for Elasticsearch cleanup + await query_runner.query(` + CREATE TABLE IF NOT EXISTS deleted_tweets_log ( + tweet_id uuid NOT NULL, + content text, + deleted_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), + PRIMARY KEY (tweet_id) + ) + `); + + // Create index for efficient cleanup queries + await query_runner.query(` + CREATE INDEX IF NOT EXISTS idx_deleted_tweets_deleted_at + ON deleted_tweets_log(deleted_at) + `); + + // Drop the old trigger and function + await query_runner.query(` + DROP TRIGGER IF EXISTS trigger_cascade_delete_child_tweets ON tweets; + `); + await query_runner.query(` + DROP FUNCTION IF EXISTS cascade_delete_child_tweets(); + `); + + // Create function that logs deletions with content + await query_runner.query(` + CREATE OR REPLACE FUNCTION cascade_delete_child_tweets() + RETURNS TRIGGER AS $$ + BEGIN + -- Log all child tweets (replies and quotes) with their content + INSERT INTO deleted_tweets_log (tweet_id, content) + SELECT tweet_id, content + FROM tweets + WHERE tweet_id IN ( + SELECT reply_tweet_id + FROM tweet_replies + WHERE original_tweet_id = OLD.tweet_id + + UNION + + SELECT quote_tweet_id + FROM tweet_quotes + WHERE original_tweet_id = OLD.tweet_id + ) + ON CONFLICT (tweet_id) DO NOTHING; + + -- Log the main tweet being deleted with its content + INSERT INTO deleted_tweets_log (tweet_id, content) + VALUES (OLD.tweet_id, OLD.content) + ON CONFLICT (tweet_id) DO NOTHING; + + -- Delete all reply tweets when a parent tweet is deleted + DELETE FROM tweets + WHERE tweet_id IN ( + SELECT reply_tweet_id + FROM tweet_replies + WHERE original_tweet_id = OLD.tweet_id + ); + + -- Delete all quote tweets when a parent tweet is deleted + DELETE FROM tweets + WHERE tweet_id IN ( + SELECT quote_tweet_id + FROM tweet_quotes + WHERE original_tweet_id = OLD.tweet_id + ); + + RETURN OLD; + END; + $$ LANGUAGE plpgsql; + `); + + // Recreate the trigger + await query_runner.query(` + CREATE TRIGGER trigger_cascade_delete_child_tweets + BEFORE DELETE ON tweets + FOR EACH ROW + EXECUTE FUNCTION cascade_delete_child_tweets(); + `); + } + + public async down(query_runner: QueryRunner): Promise { + // Drop the enhanced trigger and function + await query_runner.query(` + DROP TRIGGER IF EXISTS trigger_cascade_delete_child_tweets ON tweets; + `); + await query_runner.query(` + DROP FUNCTION IF EXISTS cascade_delete_child_tweets(); + `); + + // Restore the original simple function + await query_runner.query(` + CREATE OR REPLACE FUNCTION cascade_delete_child_tweets() + RETURNS TRIGGER AS $$ + BEGIN + DELETE FROM tweets + WHERE tweet_id IN ( + SELECT reply_tweet_id + FROM tweet_replies + WHERE original_tweet_id = OLD.tweet_id + ); + + DELETE FROM tweets + WHERE tweet_id IN ( + SELECT quote_tweet_id + FROM tweet_quotes + WHERE original_tweet_id = OLD.tweet_id + ); + + RETURN OLD; + END; + $$ LANGUAGE plpgsql; + `); + + // Recreate the original trigger + await query_runner.query(` + CREATE TRIGGER trigger_cascade_delete_child_tweets + BEFORE DELETE ON tweets + FOR EACH ROW + EXECUTE FUNCTION cascade_delete_child_tweets(); + `); + + // Drop the deleted tweets log table + await query_runner.query(` + DROP INDEX IF EXISTS idx_deleted_tweets_deleted_at; + `); + await query_runner.query(` + DROP TABLE IF EXISTS deleted_tweets_log; + `); + } +} diff --git a/src/databases/migrations/1734100000003-AddIncrementViewsFunction.ts b/src/databases/migrations/1734100000003-AddIncrementViewsFunction.ts new file mode 100644 index 00000000..4fe6aa04 --- /dev/null +++ b/src/databases/migrations/1734100000003-AddIncrementViewsFunction.ts @@ -0,0 +1,50 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class AddIncrementViewsFunction1734100000003 implements MigrationInterface { + name = 'AddIncrementViewsFunction1734100000003'; + + public async up(query_runner: QueryRunner): Promise { + // Create a function that increments tweet views atomically + await query_runner.query(` + CREATE OR REPLACE FUNCTION increment_tweet_view(p_tweet_id UUID) + RETURNS INTEGER AS $$ + DECLARE + v_new_count INTEGER; + BEGIN + UPDATE tweets + SET num_views = num_views + 1 + WHERE tweet_id = p_tweet_id + RETURNING num_views INTO v_new_count; + + RETURN COALESCE(v_new_count, 0); + END; + $$ LANGUAGE plpgsql; + `); + + // Create a function that increments multiple tweet views at once + await query_runner.query(` + CREATE OR REPLACE FUNCTION increment_tweet_views_batch(p_tweet_ids UUID[]) + RETURNS VOID AS $$ + BEGIN + UPDATE tweets + SET num_views = num_views + 1 + WHERE tweet_id = ANY(p_tweet_ids); + END; + $$ LANGUAGE plpgsql; + `); + + // Create an index on tweet_id if it doesn't exist for better performance + await query_runner.query(` + CREATE INDEX IF NOT EXISTS idx_tweets_tweet_id ON tweets(tweet_id); + `); + } + + public async down(query_runner: QueryRunner): Promise { + // Drop the functions + await query_runner.query(`DROP FUNCTION IF EXISTS increment_tweet_view(UUID);`); + await query_runner.query(`DROP FUNCTION IF EXISTS increment_tweet_views_batch(UUID[]);`); + + // Drop the index + await query_runner.query(`DROP INDEX IF EXISTS idx_tweets_tweet_id;`); + } +} diff --git a/src/databases/migrations/1765394569999-CreateHashtagCleanupTrigger.ts b/src/databases/migrations/1765394569999-CreateHashtagCleanupTrigger.ts new file mode 100644 index 00000000..b60879c5 --- /dev/null +++ b/src/databases/migrations/1765394569999-CreateHashtagCleanupTrigger.ts @@ -0,0 +1,48 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class CreateHashtagCleanupTrigger1765394569999 implements MigrationInterface { + public async up(query_runner: QueryRunner): Promise { + // Create function to cleanup hashtags when tweet is deleted + await query_runner.query(` + CREATE OR REPLACE FUNCTION cleanup_hashtags_on_tweet_delete() + RETURNS TRIGGER AS $$ + BEGIN + -- Decrement usage_count for all hashtags associated with the deleted tweet + UPDATE hashtag + SET usage_count = usage_count - 1 + WHERE name IN ( + SELECT hashtag_name + FROM tweet_hashtags + WHERE tweet_id = OLD.tweet_id + ); + + -- Delete hashtags with usage_count <= 0 + DELETE FROM hashtag + WHERE usage_count <= 0; + + RETURN OLD; + END; + $$ LANGUAGE plpgsql; + `); + + // Create trigger that fires BEFORE DELETE on tweet table + await query_runner.query(` + CREATE TRIGGER tweet_delete_hashtag_cleanup_trigger + BEFORE DELETE ON "tweets" + FOR EACH ROW + EXECUTE FUNCTION cleanup_hashtags_on_tweet_delete(); + `); + } + + public async down(query_runner: QueryRunner): Promise { + // Drop trigger + await query_runner.query(` + DROP TRIGGER IF EXISTS tweet_delete_hashtag_cleanup_trigger ON "tweets" + `); + + // Drop function + await query_runner.query(` + DROP FUNCTION IF EXISTS cleanup_hashtags_on_tweet_delete() + `); + } +} diff --git a/src/databases/migrations/1765447556136-mentions copy.ts b/src/databases/migrations/1765447556136-mentions copy.ts new file mode 100644 index 00000000..0193f489 --- /dev/null +++ b/src/databases/migrations/1765447556136-mentions copy.ts @@ -0,0 +1,15 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class Mentions1765447556136 implements MigrationInterface { + name = 'Mentions1765447556136'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query( + `ALTER TABLE "tweets" ADD "mentions" text array NOT NULL DEFAULT '{}'` + ); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query(`ALTER TABLE "tweets" DROP COLUMN "mentions"`); + } +} diff --git a/src/databases/migrations/1765447556136-mentions.ts b/src/databases/migrations/1765447556136-mentions.ts new file mode 100644 index 00000000..f38a66ee --- /dev/null +++ b/src/databases/migrations/1765447556136-mentions.ts @@ -0,0 +1,27 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class Mentions1765447556136 implements MigrationInterface { + name = 'Mentions1765447556136'; + + public async up(query_runner: QueryRunner): Promise { + // Check if the column already exists + const table = await query_runner.getTable('tweets'); + const mentions_column = table?.columns.find((col) => col.name === 'mentions'); + + if (!mentions_column) { + await query_runner.query( + `ALTER TABLE "tweets" ADD "mentions" text array NOT NULL DEFAULT '{}'` + ); + } + } + + public async down(query_runner: QueryRunner): Promise { + // Check if the column exists before dropping + const table = await query_runner.getTable('tweets'); + const mentions_column = table?.columns.find((col) => col.name === 'mentions'); + + if (mentions_column) { + await query_runner.query(`ALTER TABLE "tweets" DROP COLUMN "mentions"`); + } + } +} diff --git a/src/databases/migrations/1765539117542-view_bookmarks.ts b/src/databases/migrations/1765539117542-view_bookmarks.ts new file mode 100644 index 00000000..78c8cb0f --- /dev/null +++ b/src/databases/migrations/1765539117542-view_bookmarks.ts @@ -0,0 +1,191 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class ViewBookmarks1765539117542 implements MigrationInterface { + name = 'ViewBookmarks1765539117542'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query( + `DELETE FROM "typeorm_metadata" WHERE "type" = $1 AND "name" = $2 AND "schema" = $3`, + ['VIEW', 'user_posts_view', 'public'] + ); + await query_runner.query(`DROP VIEW "user_posts_view"`); + await query_runner.query(`CREATE VIEW "user_posts_view" AS + SELECT + t.tweet_id::text AS id, + t.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + t.tweet_id, + NULL::uuid AS repost_id, + 'tweet' AS post_type, + t.created_at AS post_date, + t.type::text AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + NULL::text AS reposted_by_name, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + FROM tweets t + INNER JOIN "user" u ON t.user_id = u.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + + UNION ALL + + SELECT + (tr.tweet_id::text || '_' || tr.user_id::text) AS id, + tr.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + tr.tweet_id, + tr.tweet_id AS repost_id, + t.type::text AS post_type, + tr.created_at AS post_date, + 'repost' AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + reposter.name AS reposted_by_name, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + + FROM tweet_reposts tr + INNER JOIN tweets t ON tr.tweet_id = t.tweet_id + INNER JOIN "user" u ON t.user_id = u.id + INNER JOIN "user" reposter ON tr.user_id = reposter.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + `); + await query_runner.query( + `INSERT INTO "typeorm_metadata"("database", "schema", "table", "type", "name", "value") VALUES (DEFAULT, $1, DEFAULT, $2, $3, $4)`, + [ + 'public', + 'VIEW', + 'user_posts_view', + 'SELECT \n t.tweet_id::text AS id,\n t.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n t.tweet_id,\n NULL::uuid AS repost_id,\n \'tweet\' AS post_type,\n t.created_at AS post_date,\n t.type::text AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n NULL::text AS reposted_by_name,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n FROM tweets t\n INNER JOIN "user" u ON t.user_id = u.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id\n \n UNION ALL\n \n SELECT \n (tr.tweet_id::text || \'_\' || tr.user_id::text) AS id,\n tr.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n tr.tweet_id,\n tr.tweet_id AS repost_id,\n t.type::text AS post_type,\n tr.created_at AS post_date,\n \'repost\' AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n reposter.name AS reposted_by_name,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n\n FROM tweet_reposts tr\n INNER JOIN tweets t ON tr.tweet_id = t.tweet_id\n INNER JOIN "user" u ON t.user_id = u.id\n INNER JOIN "user" reposter ON tr.user_id = reposter.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id', + ] + ); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query( + `DELETE FROM "typeorm_metadata" WHERE "type" = $1 AND "name" = $2 AND "schema" = $3`, + ['VIEW', 'user_posts_view', 'public'] + ); + await query_runner.query(`DROP VIEW "user_posts_view"`); + await query_runner.query(`CREATE VIEW "user_posts_view" AS SELECT + t.tweet_id::text AS id, + t.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + t.tweet_id, + NULL::uuid AS repost_id, + 'tweet' AS post_type, + t.created_at AS post_date, + t.type::text AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + NULL::text AS reposted_by_name, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + FROM tweets t + INNER JOIN "user" u ON t.user_id = u.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + + UNION ALL + + SELECT + (tr.tweet_id::text || '_' || tr.user_id::text) AS id, + tr.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + tr.tweet_id, + tr.tweet_id AS repost_id, + t.type::text AS post_type, + tr.created_at AS post_date, + 'repost' AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + reposter.name AS reposted_by_name, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + + FROM tweet_reposts tr + INNER JOIN tweets t ON tr.tweet_id = t.tweet_id + INNER JOIN "user" u ON t.user_id = u.id + INNER JOIN "user" reposter ON tr.user_id = reposter.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id`); + await query_runner.query( + `INSERT INTO "typeorm_metadata"("database", "schema", "table", "type", "name", "value") VALUES (DEFAULT, $1, DEFAULT, $2, $3, $4)`, + [ + 'public', + 'VIEW', + 'user_posts_view', + 'SELECT \n t.tweet_id::text AS id,\n t.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n t.tweet_id,\n NULL::uuid AS repost_id,\n \'tweet\' AS post_type,\n t.created_at AS post_date,\n t.type::text AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n NULL::text AS reposted_by_name,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n FROM tweets t\n INNER JOIN "user" u ON t.user_id = u.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id\n \n UNION ALL\n \n SELECT \n (tr.tweet_id::text || \'_\' || tr.user_id::text) AS id,\n tr.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n tr.tweet_id,\n tr.tweet_id AS repost_id,\n t.type::text AS post_type,\n tr.created_at AS post_date,\n \'repost\' AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n reposter.name AS reposted_by_name,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n\n FROM tweet_reposts tr\n INNER JOIN tweets t ON tr.tweet_id = t.tweet_id\n INNER JOIN "user" u ON t.user_id = u.id\n INNER JOIN "user" reposter ON tr.user_id = reposter.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id', + ] + ); + } +} diff --git a/src/databases/migrations/1765539749754-view_mentions.ts b/src/databases/migrations/1765539749754-view_mentions.ts new file mode 100644 index 00000000..72130598 --- /dev/null +++ b/src/databases/migrations/1765539749754-view_mentions.ts @@ -0,0 +1,195 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class ViewMentions1765539749754 implements MigrationInterface { + name = 'ViewMentions1765539749754'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query( + `DELETE FROM "typeorm_metadata" WHERE "type" = $1 AND "name" = $2 AND "schema" = $3`, + ['VIEW', 'user_posts_view', 'public'] + ); + await query_runner.query(`DROP VIEW "user_posts_view"`); + await query_runner.query(`CREATE VIEW "user_posts_view" AS + SELECT + t.tweet_id::text AS id, + t.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + t.tweet_id, + NULL::uuid AS repost_id, + 'tweet' AS post_type, + t.created_at AS post_date, + t.type::text AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.mentions, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + NULL::text AS reposted_by_name, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + FROM tweets t + INNER JOIN "user" u ON t.user_id = u.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + + UNION ALL + + SELECT + (tr.tweet_id::text || '_' || tr.user_id::text) AS id, + tr.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + tr.tweet_id, + tr.tweet_id AS repost_id, + t.type::text AS post_type, + tr.created_at AS post_date, + 'repost' AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.mentions, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + reposter.name AS reposted_by_name, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + + FROM tweet_reposts tr + INNER JOIN tweets t ON tr.tweet_id = t.tweet_id + INNER JOIN "user" u ON t.user_id = u.id + INNER JOIN "user" reposter ON tr.user_id = reposter.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + `); + await query_runner.query( + `INSERT INTO "typeorm_metadata"("database", "schema", "table", "type", "name", "value") VALUES (DEFAULT, $1, DEFAULT, $2, $3, $4)`, + [ + 'public', + 'VIEW', + 'user_posts_view', + 'SELECT \n t.tweet_id::text AS id,\n t.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n t.tweet_id,\n NULL::uuid AS repost_id,\n \'tweet\' AS post_type,\n t.created_at AS post_date,\n t.type::text AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.mentions,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n NULL::text AS reposted_by_name,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n FROM tweets t\n INNER JOIN "user" u ON t.user_id = u.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id\n \n UNION ALL\n \n SELECT \n (tr.tweet_id::text || \'_\' || tr.user_id::text) AS id,\n tr.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n tr.tweet_id,\n tr.tweet_id AS repost_id,\n t.type::text AS post_type,\n tr.created_at AS post_date,\n \'repost\' AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.mentions,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n reposter.name AS reposted_by_name,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n\n FROM tweet_reposts tr\n INNER JOIN tweets t ON tr.tweet_id = t.tweet_id\n INNER JOIN "user" u ON t.user_id = u.id\n INNER JOIN "user" reposter ON tr.user_id = reposter.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id', + ] + ); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query( + `DELETE FROM "typeorm_metadata" WHERE "type" = $1 AND "name" = $2 AND "schema" = $3`, + ['VIEW', 'user_posts_view', 'public'] + ); + await query_runner.query(`DROP VIEW "user_posts_view"`); + await query_runner.query(`CREATE VIEW "user_posts_view" AS SELECT + t.tweet_id::text AS id, + t.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + t.tweet_id, + NULL::uuid AS repost_id, + 'tweet' AS post_type, + t.created_at AS post_date, + t.type::text AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + NULL::text AS reposted_by_name, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + FROM tweets t + INNER JOIN "user" u ON t.user_id = u.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + + UNION ALL + + SELECT + (tr.tweet_id::text || '_' || tr.user_id::text) AS id, + tr.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + tr.tweet_id, + tr.tweet_id AS repost_id, + t.type::text AS post_type, + tr.created_at AS post_date, + 'repost' AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + reposter.name AS reposted_by_name, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + + FROM tweet_reposts tr + INNER JOIN tweets t ON tr.tweet_id = t.tweet_id + INNER JOIN "user" u ON t.user_id = u.id + INNER JOIN "user" reposter ON tr.user_id = reposter.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id`); + await query_runner.query( + `INSERT INTO "typeorm_metadata"("database", "schema", "table", "type", "name", "value") VALUES (DEFAULT, $1, DEFAULT, $2, $3, $4)`, + [ + 'public', + 'VIEW', + 'user_posts_view', + 'SELECT \n t.tweet_id::text AS id,\n t.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n t.tweet_id,\n NULL::uuid AS repost_id,\n \'tweet\' AS post_type,\n t.created_at AS post_date,\n t.type::text AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n NULL::text AS reposted_by_name,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n FROM tweets t\n INNER JOIN "user" u ON t.user_id = u.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id\n \n UNION ALL\n \n SELECT \n (tr.tweet_id::text || \'_\' || tr.user_id::text) AS id,\n tr.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n tr.tweet_id,\n tr.tweet_id AS repost_id,\n t.type::text AS post_type,\n tr.created_at AS post_date,\n \'repost\' AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n reposter.name AS reposted_by_name,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n\n FROM tweet_reposts tr\n INNER JOIN tweets t ON tr.tweet_id = t.tweet_id\n INNER JOIN "user" u ON t.user_id = u.id\n INNER JOIN "user" reposter ON tr.user_id = reposter.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id', + ] + ); + } +} diff --git a/src/databases/migrations/1765557470457-removeCreatedBy.ts b/src/databases/migrations/1765557470457-removeCreatedBy.ts new file mode 100644 index 00000000..0ecb1fb3 --- /dev/null +++ b/src/databases/migrations/1765557470457-removeCreatedBy.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class RemoveCreatedBy1765557470457 implements MigrationInterface { + name = 'RemoveCreatedBy1765557470457'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query( + `ALTER TABLE "hashtag" DROP CONSTRAINT "FK_11c8b3519f62b36dd5385c217d3"` + ); + + await query_runner.query(`ALTER TABLE "hashtag" DROP COLUMN "created_by"`); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query(`ALTER TABLE "hashtag" ADD "created_by" uuid`); + + await query_runner.query( + `ALTER TABLE "hashtag" ADD CONSTRAINT "FK_11c8b3519f62b36dd5385c217d3" FOREIGN KEY ("created_by") REFERENCES "user"("id") ON DELETE NO ACTION ON UPDATE NO ACTION` + ); + } +} diff --git a/src/databases/migrations/1765585636405-TweetHashtagEntity.ts b/src/databases/migrations/1765585636405-TweetHashtagEntity.ts new file mode 100644 index 00000000..6745b9df --- /dev/null +++ b/src/databases/migrations/1765585636405-TweetHashtagEntity.ts @@ -0,0 +1,29 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class TweetHashtagEntity1765585636405 implements MigrationInterface { + name = 'TweetHashtagEntity1765585636405'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query( + `CREATE TABLE "tweet_hashtags" ("tweet_id" uuid NOT NULL, "hashtag_name" character varying NOT NULL, CONSTRAINT "PK_42219b0e52e3bee49d2772b3a54" PRIMARY KEY ("tweet_id", "hashtag_name"))` + ); + + await query_runner.query( + `ALTER TABLE "tweet_hashtags" ADD CONSTRAINT "FK_efe191c9c3d1359e60bac167736" FOREIGN KEY ("tweet_id") REFERENCES "tweets"("tweet_id") ON DELETE CASCADE ON UPDATE NO ACTION` + ); + await query_runner.query( + `ALTER TABLE "tweet_hashtags" ADD CONSTRAINT "FK_b0a40275de4a8088c5e6426419d" FOREIGN KEY ("hashtag_name") REFERENCES "hashtag"("name") ON DELETE CASCADE ON UPDATE NO ACTION` + ); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query( + `ALTER TABLE "tweet_hashtags" DROP CONSTRAINT "FK_b0a40275de4a8088c5e6426419d"` + ); + await query_runner.query( + `ALTER TABLE "tweet_hashtags" DROP CONSTRAINT "FK_efe191c9c3d1359e60bac167736"` + ); + + await query_runner.query(`DROP TABLE "tweet_hashtags"`); + } +} diff --git a/src/databases/migrations/1765636698571-AddReposterUsername.ts b/src/databases/migrations/1765636698571-AddReposterUsername.ts new file mode 100644 index 00000000..18a19e47 --- /dev/null +++ b/src/databases/migrations/1765636698571-AddReposterUsername.ts @@ -0,0 +1,199 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class AddReposterUsername1765636698571 implements MigrationInterface { + name = 'AddReposterUsername1765636698571'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query( + `DELETE FROM "typeorm_metadata" WHERE "type" = $1 AND "name" = $2 AND "schema" = $3`, + ['VIEW', 'user_posts_view', 'public'] + ); + await query_runner.query(`DROP VIEW "user_posts_view"`); + await query_runner.query(`CREATE VIEW "user_posts_view" AS + SELECT + t.tweet_id::text AS id, + t.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + t.tweet_id, + NULL::uuid AS repost_id, + t.type::text AS post_type, + t.created_at AS post_date, + t.type::text AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.mentions, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + NULL::text AS reposted_by_name, + NULL::text AS reposted_by_username, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + FROM tweets t + INNER JOIN "user" u ON t.user_id = u.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + + UNION ALL + + SELECT + (tr.tweet_id::text || '_' || tr.user_id::text) AS id, + tr.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + tr.tweet_id, + tr.tweet_id AS repost_id, + t.type::text AS post_type, + tr.created_at AS post_date, + 'repost' AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.mentions, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + reposter.name AS reposted_by_name, + reposter.username AS reposted_by_username, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + + FROM tweet_reposts tr + INNER JOIN tweets t ON tr.tweet_id = t.tweet_id + INNER JOIN "user" u ON t.user_id = u.id + INNER JOIN "user" reposter ON tr.user_id = reposter.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + `); + await query_runner.query( + `INSERT INTO "typeorm_metadata"("database", "schema", "table", "type", "name", "value") VALUES (DEFAULT, $1, DEFAULT, $2, $3, $4)`, + [ + 'public', + 'VIEW', + 'user_posts_view', + 'SELECT \n t.tweet_id::text AS id,\n t.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n t.tweet_id,\n NULL::uuid AS repost_id,\n t.type::text AS post_type,\n t.created_at AS post_date,\n t.type::text AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.mentions,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n NULL::text AS reposted_by_name,\n NULL::text AS reposted_by_username,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n FROM tweets t\n INNER JOIN "user" u ON t.user_id = u.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id\n \n UNION ALL\n \n SELECT \n (tr.tweet_id::text || \'_\' || tr.user_id::text) AS id,\n tr.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n tr.tweet_id,\n tr.tweet_id AS repost_id,\n t.type::text AS post_type,\n tr.created_at AS post_date,\n \'repost\' AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.mentions,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n reposter.name AS reposted_by_name,\n reposter.username AS reposted_by_username,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n\n FROM tweet_reposts tr\n INNER JOIN tweets t ON tr.tweet_id = t.tweet_id\n INNER JOIN "user" u ON t.user_id = u.id\n INNER JOIN "user" reposter ON tr.user_id = reposter.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id', + ] + ); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query( + `DELETE FROM "typeorm_metadata" WHERE "type" = $1 AND "name" = $2 AND "schema" = $3`, + ['VIEW', 'user_posts_view', 'public'] + ); + await query_runner.query(`DROP VIEW "user_posts_view"`); + await query_runner.query(`CREATE VIEW "user_posts_view" AS SELECT + t.tweet_id::text AS id, + t.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + t.tweet_id, + NULL::uuid AS repost_id, + 'tweet' AS post_type, + t.created_at AS post_date, + t.type::text AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.mentions, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + NULL::text AS reposted_by_name, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + FROM tweets t + INNER JOIN "user" u ON t.user_id = u.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + + UNION ALL + + SELECT + (tr.tweet_id::text || '_' || tr.user_id::text) AS id, + tr.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + tr.tweet_id, + tr.tweet_id AS repost_id, + t.type::text AS post_type, + tr.created_at AS post_date, + 'repost' AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.mentions, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + reposter.name AS reposted_by_name, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + + FROM tweet_reposts tr + INNER JOIN tweets t ON tr.tweet_id = t.tweet_id + INNER JOIN "user" u ON t.user_id = u.id + INNER JOIN "user" reposter ON tr.user_id = reposter.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id`); + await query_runner.query( + `INSERT INTO "typeorm_metadata"("database", "schema", "table", "type", "name", "value") VALUES (DEFAULT, $1, DEFAULT, $2, $3, $4)`, + [ + 'public', + 'VIEW', + 'user_posts_view', + 'SELECT \n t.tweet_id::text AS id,\n t.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n t.tweet_id,\n NULL::uuid AS repost_id,\n \'tweet\' AS post_type,\n t.created_at AS post_date,\n t.type::text AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.mentions,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n NULL::text AS reposted_by_name,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n FROM tweets t\n INNER JOIN "user" u ON t.user_id = u.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id\n \n UNION ALL\n \n SELECT \n (tr.tweet_id::text || \'_\' || tr.user_id::text) AS id,\n tr.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n tr.tweet_id,\n tr.tweet_id AS repost_id,\n t.type::text AS post_type,\n tr.created_at AS post_date,\n \'repost\' AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.mentions,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n reposter.name AS reposted_by_name,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n\n FROM tweet_reposts tr\n INNER JOIN tweets t ON tr.tweet_id = t.tweet_id\n INNER JOIN "user" u ON t.user_id = u.id\n INNER JOIN "user" reposter ON tr.user_id = reposter.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id', + ] + ); + } +} diff --git a/src/databases/migrations/1765743134688-addHashtagCreatedAt.ts b/src/databases/migrations/1765743134688-addHashtagCreatedAt.ts new file mode 100644 index 00000000..093240cf --- /dev/null +++ b/src/databases/migrations/1765743134688-addHashtagCreatedAt.ts @@ -0,0 +1,17 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class AddHashtagCreatedAt1765743134688 implements MigrationInterface { + name = 'AddHashtagCreatedAt1765743134688'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query(`ALTER TABLE "hashtag" ADD "category" character varying`); + await query_runner.query( + `ALTER TABLE "tweet_hashtags" ADD "tweet_created_at" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now()` + ); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query(`ALTER TABLE "tweet_hashtags" DROP COLUMN "tweet_created_at"`); + await query_runner.query(`ALTER TABLE "hashtag" DROP COLUMN "category"`); + } +} diff --git a/src/databases/migrations/1765799148665-CreateUserTimelineCursor.ts b/src/databases/migrations/1765799148665-CreateUserTimelineCursor.ts new file mode 100644 index 00000000..c1c58922 --- /dev/null +++ b/src/databases/migrations/1765799148665-CreateUserTimelineCursor.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class CreateUserTimelineCursor1765799148665 implements MigrationInterface { + name = 'CreateUserTimelineCursor1765799148665'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query( + `CREATE TABLE "user_timeline_cursors" ("user_id" uuid NOT NULL, "last_fetched_tweet_id" uuid, "last_updated_at" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), CONSTRAINT "PK_3ba26dbd089693ecd14cf188a19" PRIMARY KEY ("user_id"))` + ); + await query_runner.query( + `ALTER TABLE "user_timeline_cursors" ADD CONSTRAINT "FK_3ba26dbd089693ecd14cf188a19" FOREIGN KEY ("user_id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION` + ); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query( + `ALTER TABLE "user_timeline_cursors" DROP CONSTRAINT "FK_3ba26dbd089693ecd14cf188a19"` + ); + await query_runner.query(`DROP TABLE "user_timeline_cursors"`); + } +} diff --git a/src/databases/migrations/1765799789310-ConvAndParentUserId.ts b/src/databases/migrations/1765799789310-ConvAndParentUserId.ts new file mode 100644 index 00000000..53fddcf5 --- /dev/null +++ b/src/databases/migrations/1765799789310-ConvAndParentUserId.ts @@ -0,0 +1,211 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class ConvAndParentUserId1765799789310 implements MigrationInterface { + name = 'ConvAndParentUserId1765799789310'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query( + `DELETE FROM "typeorm_metadata" WHERE "type" = $1 AND "name" = $2 AND "schema" = $3`, + ['VIEW', 'user_posts_view', 'public'] + ); + await query_runner.query(`DROP VIEW "user_posts_view"`); + await query_runner.query(`CREATE VIEW "user_posts_view" AS + SELECT + t.tweet_id::text AS id, + t.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + t.tweet_id, + NULL::uuid AS repost_id, + t.type::text AS post_type, + t.created_at AS post_date, + t.type::text AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.mentions, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + NULL::text AS reposted_by_name, + NULL::text AS reposted_by_username, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id, + conv_tweet.user_id AS conversation_user_id, + COALESCE(orig_quote_tweet.user_id, orig_reply_tweet.user_id) AS parent_user_id + FROM tweets t + INNER JOIN "user" u ON t.user_id = u.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + LEFT JOIN tweets conv_tweet ON trep.conversation_id = conv_tweet.tweet_id + LEFT JOIN tweets orig_quote_tweet ON tq.original_tweet_id = orig_quote_tweet.tweet_id + LEFT JOIN tweets orig_reply_tweet ON trep.original_tweet_id = orig_reply_tweet.tweet_id + + UNION ALL + + SELECT + (tr.tweet_id::text || '_' || tr.user_id::text) AS id, + tr.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + tr.tweet_id, + tr.tweet_id AS repost_id, + t.type::text AS post_type, + tr.created_at AS post_date, + 'repost' AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.mentions, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + reposter.name AS reposted_by_name, + reposter.username AS reposted_by_username, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id, + conv_tweet.user_id AS conversation_user_id, + COALESCE(orig_quote_tweet.user_id, orig_reply_tweet.user_id) AS parent_user_id + + FROM tweet_reposts tr + INNER JOIN tweets t ON tr.tweet_id = t.tweet_id + INNER JOIN "user" u ON t.user_id = u.id + INNER JOIN "user" reposter ON tr.user_id = reposter.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + LEFT JOIN tweets conv_tweet ON trep.conversation_id = conv_tweet.tweet_id + LEFT JOIN tweets orig_quote_tweet ON tq.original_tweet_id = orig_quote_tweet.tweet_id + LEFT JOIN tweets orig_reply_tweet ON trep.original_tweet_id = orig_reply_tweet.tweet_id + `); + await query_runner.query( + `INSERT INTO "typeorm_metadata"("database", "schema", "table", "type", "name", "value") VALUES (DEFAULT, $1, DEFAULT, $2, $3, $4)`, + [ + 'public', + 'VIEW', + 'user_posts_view', + 'SELECT \n t.tweet_id::text AS id,\n t.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n t.tweet_id,\n NULL::uuid AS repost_id,\n t.type::text AS post_type,\n t.created_at AS post_date,\n t.type::text AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.mentions,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n NULL::text AS reposted_by_name,\n NULL::text AS reposted_by_username,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id,\n conv_tweet.user_id AS conversation_user_id,\n COALESCE(orig_quote_tweet.user_id, orig_reply_tweet.user_id) AS parent_user_id\n FROM tweets t\n INNER JOIN "user" u ON t.user_id = u.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id\n LEFT JOIN tweets conv_tweet ON trep.conversation_id = conv_tweet.tweet_id\n LEFT JOIN tweets orig_quote_tweet ON tq.original_tweet_id = orig_quote_tweet.tweet_id\n LEFT JOIN tweets orig_reply_tweet ON trep.original_tweet_id = orig_reply_tweet.tweet_id\n \n UNION ALL\n \n SELECT \n (tr.tweet_id::text || \'_\' || tr.user_id::text) AS id,\n tr.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n tr.tweet_id,\n tr.tweet_id AS repost_id,\n t.type::text AS post_type,\n tr.created_at AS post_date,\n \'repost\' AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.mentions,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n reposter.name AS reposted_by_name,\n reposter.username AS reposted_by_username,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id,\n conv_tweet.user_id AS conversation_user_id,\n COALESCE(orig_quote_tweet.user_id, orig_reply_tweet.user_id) AS parent_user_id\n\n FROM tweet_reposts tr\n INNER JOIN tweets t ON tr.tweet_id = t.tweet_id\n INNER JOIN "user" u ON t.user_id = u.id\n INNER JOIN "user" reposter ON tr.user_id = reposter.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id\n LEFT JOIN tweets conv_tweet ON trep.conversation_id = conv_tweet.tweet_id\n LEFT JOIN tweets orig_quote_tweet ON tq.original_tweet_id = orig_quote_tweet.tweet_id\n LEFT JOIN tweets orig_reply_tweet ON trep.original_tweet_id = orig_reply_tweet.tweet_id', + ] + ); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query( + `DELETE FROM "typeorm_metadata" WHERE "type" = $1 AND "name" = $2 AND "schema" = $3`, + ['VIEW', 'user_posts_view', 'public'] + ); + await query_runner.query(`DROP VIEW "user_posts_view"`); + await query_runner.query(`CREATE VIEW "user_posts_view" AS SELECT + t.tweet_id::text AS id, + t.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + t.tweet_id, + NULL::uuid AS repost_id, + t.type::text AS post_type, + t.created_at AS post_date, + t.type::text AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.mentions, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + NULL::text AS reposted_by_name, + NULL::text AS reposted_by_username, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + FROM tweets t + INNER JOIN "user" u ON t.user_id = u.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + + UNION ALL + + SELECT + (tr.tweet_id::text || '_' || tr.user_id::text) AS id, + tr.user_id AS profile_user_id, + t.user_id AS tweet_author_id, + tr.tweet_id, + tr.tweet_id AS repost_id, + t.type::text AS post_type, + tr.created_at AS post_date, + 'repost' AS type, + t.content, + t.images, + t.videos, + t.num_likes, + t.num_reposts, + t.num_views, + t.num_quotes, + t.num_replies, + t.num_bookmarks, + t.mentions, + t.created_at, + t.updated_at, + u.username, + u.name, + u.followers, + u.following, + u.avatar_url, + u.cover_url, + u.verified, + u.bio, + reposter.name AS reposted_by_name, + reposter.username AS reposted_by_username, + COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, + trep.conversation_id AS conversation_id + + FROM tweet_reposts tr + INNER JOIN tweets t ON tr.tweet_id = t.tweet_id + INNER JOIN "user" u ON t.user_id = u.id + INNER JOIN "user" reposter ON tr.user_id = reposter.id + LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id + LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id`); + await query_runner.query( + `INSERT INTO "typeorm_metadata"("database", "schema", "table", "type", "name", "value") VALUES (DEFAULT, $1, DEFAULT, $2, $3, $4)`, + [ + 'public', + 'VIEW', + 'user_posts_view', + 'SELECT \n t.tweet_id::text AS id,\n t.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n t.tweet_id,\n NULL::uuid AS repost_id,\n t.type::text AS post_type,\n t.created_at AS post_date,\n t.type::text AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.mentions,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n NULL::text AS reposted_by_name,\n NULL::text AS reposted_by_username,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n FROM tweets t\n INNER JOIN "user" u ON t.user_id = u.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id\n \n UNION ALL\n \n SELECT \n (tr.tweet_id::text || \'_\' || tr.user_id::text) AS id,\n tr.user_id AS profile_user_id,\n t.user_id AS tweet_author_id,\n tr.tweet_id,\n tr.tweet_id AS repost_id,\n t.type::text AS post_type,\n tr.created_at AS post_date,\n \'repost\' AS type,\n t.content,\n t.images,\n t.videos,\n t.num_likes,\n t.num_reposts,\n t.num_views,\n t.num_quotes,\n t.num_replies,\n t.num_bookmarks,\n t.mentions,\n t.created_at,\n t.updated_at,\n u.username,\n u.name,\n u.followers,\n u.following,\n u.avatar_url,\n u.cover_url,\n u.verified,\n u.bio,\n reposter.name AS reposted_by_name,\n reposter.username AS reposted_by_username,\n COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id,\n trep.conversation_id AS conversation_id\n\n FROM tweet_reposts tr\n INNER JOIN tweets t ON tr.tweet_id = t.tweet_id\n INNER JOIN "user" u ON t.user_id = u.id\n INNER JOIN "user" reposter ON tr.user_id = reposter.id\n LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id\n LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id', + ] + ); + } +} diff --git a/src/databases/migrations/1765823235580-AddPositionToUserTimelineCursor.ts b/src/databases/migrations/1765823235580-AddPositionToUserTimelineCursor.ts new file mode 100644 index 00000000..a4cd613b --- /dev/null +++ b/src/databases/migrations/1765823235580-AddPositionToUserTimelineCursor.ts @@ -0,0 +1,17 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class AddPositionToUserTimelineCursor1765823235580 implements MigrationInterface { + public async up(query_runner: QueryRunner): Promise { + await query_runner.query(` + ALTER TABLE "user_timeline_cursors" + ADD COLUMN "last_fetched_position" integer NOT NULL DEFAULT 0 + `); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query(` + ALTER TABLE "user_timeline_cursors" + DROP COLUMN "last_fetched_position" + `); + } +} diff --git a/src/databases/migrations/1765825301002-fixChatFK.ts b/src/databases/migrations/1765825301002-fixChatFK.ts new file mode 100644 index 00000000..ecef11e7 --- /dev/null +++ b/src/databases/migrations/1765825301002-fixChatFK.ts @@ -0,0 +1,36 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class FixChatandBlockMute1765825301002 implements MigrationInterface { + name = 'FixChatandBlockMute1765825301002'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query( + `ALTER TABLE "chats" DROP CONSTRAINT "FK_a14c79d67133bb0df4a71807a74"` + ); + await query_runner.query( + `ALTER TABLE "chats" DROP CONSTRAINT "FK_05b8003b6a5c6a9b16cb31fea2a"` + ); + await query_runner.query( + `ALTER TABLE "chats" ADD CONSTRAINT "FK_05b8003b6a5c6a9b16cb31fea2a" FOREIGN KEY ("user1_id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION` + ); + await query_runner.query( + `ALTER TABLE "chats" ADD CONSTRAINT "FK_a14c79d67133bb0df4a71807a74" FOREIGN KEY ("user2_id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION` + ); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query( + `ALTER TABLE "chats" DROP CONSTRAINT "FK_a14c79d67133bb0df4a71807a74"` + ); + await query_runner.query( + `ALTER TABLE "chats" DROP CONSTRAINT "FK_05b8003b6a5c6a9b16cb31fea2a"` + ); + + await query_runner.query( + `ALTER TABLE "chats" ADD CONSTRAINT "FK_05b8003b6a5c6a9b16cb31fea2a" FOREIGN KEY ("user1_id") REFERENCES "user"("id") ON DELETE NO ACTION ON UPDATE NO ACTION` + ); + await query_runner.query( + `ALTER TABLE "chats" ADD CONSTRAINT "FK_a14c79d67133bb0df4a71807a74" FOREIGN KEY ("user2_id") REFERENCES "user"("id") ON DELETE NO ACTION ON UPDATE NO ACTION` + ); + } +} diff --git a/src/databases/migrations/1765826464278-fixBlockMute.ts b/src/databases/migrations/1765826464278-fixBlockMute.ts new file mode 100644 index 00000000..b47e6c62 --- /dev/null +++ b/src/databases/migrations/1765826464278-fixBlockMute.ts @@ -0,0 +1,35 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class FixBlockMute1765826464278 implements MigrationInterface { + name = 'FixBlockMute1765826464278'; + + public async up(query_runner: QueryRunner): Promise { + await query_runner.query( + `ALTER TABLE "user_blocks" ADD CONSTRAINT "FK_dfcd8a81016d1de587fbd2d70bf" FOREIGN KEY ("blocker_id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION` + ); + await query_runner.query( + `ALTER TABLE "user_blocks" ADD CONSTRAINT "FK_7a0806a54f0ad9ced3e247cacd1" FOREIGN KEY ("blocked_id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION` + ); + await query_runner.query( + `ALTER TABLE "user_mutes" ADD CONSTRAINT "FK_3c5a99ffecb6ebcfa39c0ec89e3" FOREIGN KEY ("muter_id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION` + ); + await query_runner.query( + `ALTER TABLE "user_mutes" ADD CONSTRAINT "FK_0574bdce9d2af99028b0e6f9ba5" FOREIGN KEY ("muted_id") REFERENCES "user"("id") ON DELETE CASCADE ON UPDATE NO ACTION` + ); + } + + public async down(query_runner: QueryRunner): Promise { + await query_runner.query( + `ALTER TABLE "user_mutes" DROP CONSTRAINT "FK_0574bdce9d2af99028b0e6f9ba5"` + ); + await query_runner.query( + `ALTER TABLE "user_mutes" DROP CONSTRAINT "FK_3c5a99ffecb6ebcfa39c0ec89e3"` + ); + await query_runner.query( + `ALTER TABLE "user_blocks" DROP CONSTRAINT "FK_7a0806a54f0ad9ced3e247cacd1"` + ); + await query_runner.query( + `ALTER TABLE "user_blocks" DROP CONSTRAINT "FK_dfcd8a81016d1de587fbd2d70bf"` + ); + } +} diff --git a/src/elasticsearch/elasticsearch.module.spec.ts b/src/elasticsearch/elasticsearch.module.spec.ts new file mode 100644 index 00000000..28c20dac --- /dev/null +++ b/src/elasticsearch/elasticsearch.module.spec.ts @@ -0,0 +1,213 @@ +import { Test, TestingModule } from '@nestjs/testing'; +import { ElasticsearchModule } from './elasticsearch.module'; +import { ElasticsearchModule as NestElasticsearchModule } from '@nestjs/elasticsearch'; +import { ConfigModule, ConfigService } from '@nestjs/config'; +import { ElasticsearchSetupService } from './elasticsearch-setup.service'; +import { TweetSeederService } from './seeders/tweets-seeder.service'; +import { getRepositoryToken } from '@nestjs/typeorm'; +import { User } from 'src/user/entities/user.entity'; +import { Tweet } from 'src/tweets/entities/tweet.entity'; + +describe('ElasticsearchModule', () => { + let module: TestingModule; + + const mock_repository = { + find: jest.fn(), + findOne: jest.fn(), + save: jest.fn(), + create: jest.fn(), + update: jest.fn(), + delete: jest.fn(), + }; + + beforeEach(async () => { + module = await Test.createTestingModule({ + imports: [ElasticsearchModule], + }) + .overrideProvider(ConfigService) + .useValue({ + get: jest.fn((key: string) => { + const config = { + ELASTICSEARCH_NODE: 'http://test-node:9200', + ELASTICSEARCH_USERNAME: 'test-user', + ELASTICSEARCH_PASSWORD: 'test-password', + }; + return config[key]; + }), + }) + .overrideProvider(getRepositoryToken(User)) + .useValue(mock_repository) + .overrideProvider(getRepositoryToken(Tweet)) + .useValue(mock_repository) + .overrideProvider(ElasticsearchSetupService) + .useValue({ + setupIndices: jest.fn(), + createIndex: jest.fn(), + }) + .overrideProvider(TweetSeederService) + .useValue({ + seed: jest.fn(), + }) + .compile(); + }); + + it('should be defined', () => { + expect(module).toBeDefined(); + }); + + it('should have ElasticsearchSetupService', () => { + const service = module.get(ElasticsearchSetupService); + expect(service).toBeDefined(); + }); + + it('should have TweetSeederService', () => { + const service = module.get(TweetSeederService); + expect(service).toBeDefined(); + }); + + it('should export NestElasticsearchModule', () => { + const exports = Reflect.getMetadata('exports', ElasticsearchModule); + expect(exports).toContain(NestElasticsearchModule); + }); + + it('should export ElasticsearchSetupService', () => { + const exports = Reflect.getMetadata('exports', ElasticsearchModule); + expect(exports).toContain(ElasticsearchSetupService); + }); + + it('should export TweetSeederService', () => { + const exports = Reflect.getMetadata('exports', ElasticsearchModule); + expect(exports).toContain(TweetSeederService); + }); + + it('should import ConfigModule', () => { + const imports = Reflect.getMetadata('imports', ElasticsearchModule); + expect(imports).toContain(ConfigModule); + }); + + it('should have User repository available', () => { + const repository = module.get(getRepositoryToken(User)); + expect(repository).toBeDefined(); + }); + + it('should have Tweet repository available', () => { + const repository = module.get(getRepositoryToken(Tweet)); + expect(repository).toBeDefined(); + }); +}); + +describe('custom configuration values', () => { + it('should use custom node when ELASTICSEARCH_NODE is provided', () => { + const mock_config_service = { + get: jest.fn().mockReturnValue('http://custom:9200'), + }; + + const node = mock_config_service.get('ELASTICSEARCH_NODE') || 'http://localhost:9200'; + + expect(node).toBe('http://custom:9200'); + }); + + it('should use custom username when ELASTICSEARCH_USERNAME is provided', () => { + const mock_config_service = { + get: jest.fn().mockReturnValue('custom-user'), + }; + + const username = mock_config_service.get('ELASTICSEARCH_USERNAME') || 'elastic'; + + expect(username).toBe('custom-user'); + }); + + it('should use custom password when ELASTICSEARCH_PASSWORD is provided', () => { + const mock_config_service = { + get: jest.fn().mockReturnValue('custom-pass'), + }; + + const password = mock_config_service.get('ELASTICSEARCH_PASSWORD') || 'dummy_password'; + + expect(password).toBe('custom-pass'); + }); + + it('should use all custom values when all env vars are provided', () => { + const mock_config_service = { + get: jest.fn((key: string) => { + const config = { + ELASTICSEARCH_NODE: 'http://custom:9200', + ELASTICSEARCH_USERNAME: 'custom-user', + ELASTICSEARCH_PASSWORD: 'custom-pass', + }; + return config[key]; + }), + }; + + const config = { + node: mock_config_service.get('ELASTICSEARCH_NODE') || 'http://localhost:9200', + auth: { + username: mock_config_service.get('ELASTICSEARCH_USERNAME') || 'elastic', + password: mock_config_service.get('ELASTICSEARCH_PASSWORD') || 'dummy_password', + }, + tls: { + rejectUnauthorized: false, + }, + }; + + expect(config.node).toBe('http://custom:9200'); + expect(config.auth.username).toBe('custom-user'); + expect(config.auth.password).toBe('custom-pass'); + }); +}); + +describe('mixed configuration (some custom, some default)', () => { + it('should use custom node but default credentials', () => { + const mock_config_service = { + get: jest.fn((key: string) => { + return key === 'ELASTICSEARCH_NODE' ? 'http://custom:9200' : undefined; + }), + }; + + const config = { + node: mock_config_service.get('ELASTICSEARCH_NODE') || 'http://localhost:9200', + auth: { + username: mock_config_service.get('ELASTICSEARCH_USERNAME') || 'elastic', + password: mock_config_service.get('ELASTICSEARCH_PASSWORD') || 'dummy_password', + }, + }; + + expect(config.node).toBe('http://custom:9200'); + expect(config.auth.username).toBe('elastic'); + expect(config.auth.password).toBe('dummy_password'); + }); + + it('should use default node but custom credentials', () => { + const mock_config_service = { + get: jest.fn((key: string) => { + if (key === 'ELASTICSEARCH_USERNAME') return 'custom-user'; + if (key === 'ELASTICSEARCH_PASSWORD') return 'custom-pass'; + return undefined; + }), + }; + + const config = { + node: mock_config_service.get('ELASTICSEARCH_NODE') || 'http://localhost:9200', + auth: { + username: mock_config_service.get('ELASTICSEARCH_USERNAME') || 'elastic', + password: mock_config_service.get('ELASTICSEARCH_PASSWORD') || 'dummy_password', + }, + }; + + expect(config.node).toBe('http://localhost:9200'); + expect(config.auth.username).toBe('custom-user'); + expect(config.auth.password).toBe('custom-pass'); + }); +}); + +describe('TLS configuration', () => { + it('should always set rejectUnauthorized to false', () => { + const config = { + tls: { + rejectUnauthorized: false, + }, + }; + + expect(config.tls.rejectUnauthorized).toBe(false); + }); +}); diff --git a/src/elasticsearch/elasticsearch.module.ts b/src/elasticsearch/elasticsearch.module.ts index a345c117..208a47fb 100644 --- a/src/elasticsearch/elasticsearch.module.ts +++ b/src/elasticsearch/elasticsearch.module.ts @@ -2,7 +2,6 @@ import { Module } from '@nestjs/common'; import { ElasticsearchModule as NestElasticsearchModule } from '@nestjs/elasticsearch'; import { ConfigModule, ConfigService } from '@nestjs/config'; import { ElasticsearchSetupService } from './elasticsearch-setup.service'; -import { UserSeederService } from './seeders/user-seeder.service'; import { TweetSeederService } from './seeders/tweets-seeder.service'; import { TypeOrmModule } from '@nestjs/typeorm'; import { User } from 'src/user/entities/user.entity'; @@ -28,12 +27,7 @@ import { Tweet } from 'src/tweets/entities/tweet.entity'; TypeOrmModule.forFeature([User]), TypeOrmModule.forFeature([Tweet]), ], - providers: [ElasticsearchSetupService, UserSeederService, TweetSeederService], - exports: [ - NestElasticsearchModule, - ElasticsearchSetupService, - UserSeederService, - TweetSeederService, - ], + providers: [ElasticsearchSetupService, TweetSeederService], + exports: [NestElasticsearchModule, ElasticsearchSetupService, TweetSeederService], }) export class ElasticsearchModule {} diff --git a/src/elasticsearch/schemas/index.ts b/src/elasticsearch/schemas/index.ts index c23196ef..bd235c5e 100644 --- a/src/elasticsearch/schemas/index.ts +++ b/src/elasticsearch/schemas/index.ts @@ -1,12 +1,9 @@ import { tweets_index_config } from './tweets.schema'; -import { users_index_config } from './users.schema'; export const ELASTICSEARCH_INDICES = { - USERS: 'users', TWEETS: 'tweets', }; export const INDEX_CONFIGS = { - [ELASTICSEARCH_INDICES.USERS]: users_index_config, [ELASTICSEARCH_INDICES.TWEETS]: tweets_index_config, }; diff --git a/src/elasticsearch/schemas/tweets.schema.ts b/src/elasticsearch/schemas/tweets.schema.ts index 5fd38187..d76408b6 100644 --- a/src/elasticsearch/schemas/tweets.schema.ts +++ b/src/elasticsearch/schemas/tweets.schema.ts @@ -9,6 +9,9 @@ export const tweets_index_config = { tokenizer: 'standard', filter: ['lowercase', 'stop', 'snowball'], }, + arabic_analyzer: { + type: 'arabic', + }, autocomplete_analyzer: { type: 'custom', tokenizer: 'autocomplete_tokenizer', @@ -51,11 +54,18 @@ export const tweets_index_config = { analyzer: 'autocomplete_analyzer', search_analyzer: 'autocomplete_search_analyzer', }, + arabic: { + type: 'text', + analyzer: 'arabic_analyzer', + }, }, }, hashtags: { type: 'keyword', }, + mentions: { + type: 'keyword', + }, created_at: { type: 'date', }, @@ -87,6 +97,15 @@ export const tweets_index_config = { type: 'keyword', ignore_above: 256, }, + autocomplete: { + type: 'text', + analyzer: 'autocomplete_analyzer', + search_analyzer: 'autocomplete_search_analyzer', + }, + arabic: { + type: 'text', + analyzer: 'arabic_analyzer', + }, }, }, username: { diff --git a/src/elasticsearch/schemas/users.schema.ts b/src/elasticsearch/schemas/users.schema.ts deleted file mode 100644 index ec53beac..00000000 --- a/src/elasticsearch/schemas/users.schema.ts +++ /dev/null @@ -1,67 +0,0 @@ -export const users_index_config = { - settings: { - number_of_shards: 1, - number_of_replicas: 1, - analysis: { - analyzer: { - username_analyzer: { - type: 'custom', - tokenizer: 'keyword', - filter: ['lowercase'], - }, - autocomplete_analyzer: { - type: 'custom', - tokenizer: 'edge_ngram_tokenizer', - filter: ['lowercase'], - }, - autocomplete_search_analyzer: { - type: 'custom', - tokenizer: 'keyword', - filter: ['lowercase'], - }, - }, - tokenizer: { - edge_ngram_tokenizer: { - type: 'edge_ngram', - min_gram: 2, - max_gram: 20, - token_chars: ['letter', 'digit'], - }, - }, - }, - }, - mappings: { - properties: { - user_id: { type: 'keyword' }, - username: { - type: 'text', - analyzer: 'username_analyzer', - fields: { - keyword: { type: 'keyword' }, - autocomplete: { - type: 'text', - analyzer: 'autocomplete_analyzer', - search_analyzer: 'autocomplete_search_analyzer', - }, - }, - }, - name: { - type: 'text', - fields: { - keyword: { type: 'keyword' }, - autocomplete: { - type: 'text', - analyzer: 'autocomplete_analyzer', - search_analyzer: 'autocomplete_search_analyzer', - }, - }, - }, - followers: { type: 'integer' }, - following: { type: 'integer' }, - verified: { type: 'boolean' }, - bio: { type: 'text' }, - avatar_url: { type: 'keyword', index: false }, - country: { type: 'keyword' }, - }, - }, -}; diff --git a/src/elasticsearch/scripts/es-seed.ts b/src/elasticsearch/scripts/es-seed.ts index 9b32d3a0..32ad4ae0 100644 --- a/src/elasticsearch/scripts/es-seed.ts +++ b/src/elasticsearch/scripts/es-seed.ts @@ -1,6 +1,5 @@ import { NestFactory } from '@nestjs/core'; import { AppModule } from '../../app.module'; -import { UserSeederService } from '../seeders/user-seeder.service'; import { ElasticsearchSetupService } from '../elasticsearch-setup.service'; import { TweetSeederService } from '../seeders/tweets-seeder.service'; @@ -13,15 +12,11 @@ async function bootstrap() { try { const es_setup = app.get(ElasticsearchSetupService); - const user_seeder = app.get(UserSeederService); const tweets_seeder = app.get(TweetSeederService); console.log('📋 Setting up indices...'); await es_setup.setupIndices(); - console.log('đŸ‘Ĩ Seeding users...'); - await user_seeder.seedUsers(); - console.log('📝 Seeding tweets...'); await tweets_seeder.seedTweets(); diff --git a/src/elasticsearch/seeders/tweets-seeder.service.ts b/src/elasticsearch/seeders/tweets-seeder.service.ts index 77dd1643..66a33c71 100644 --- a/src/elasticsearch/seeders/tweets-seeder.service.ts +++ b/src/elasticsearch/seeders/tweets-seeder.service.ts @@ -13,7 +13,7 @@ export class TweetSeederService { constructor( @InjectRepository(Tweet) - private tweets_repository: Repository, + private readonly tweets_repository: Repository, private readonly elasticsearch_service: ElasticsearchService, private readonly data_source: DataSource ) {} @@ -122,6 +122,7 @@ export class TweetSeederService { following: tweet.user?.following || 0, images: tweet.images || [], videos: tweet.videos || [], + mentions: tweet.mentions || [], bio: tweet.user?.bio, avatar_url: tweet.user?.avatar_url, }; @@ -142,7 +143,7 @@ export class TweetSeederService { private extractHashtags(content: string): string[] { if (!content) return []; - const regex = /#[\w]+/g; + const regex = /#[\p{L}\p{N}_]+/gu; const matches = content.match(regex); if (!matches) return []; diff --git a/src/elasticsearch/seeders/user-seeder.service.ts b/src/elasticsearch/seeders/user-seeder.service.ts deleted file mode 100644 index 07dea386..00000000 --- a/src/elasticsearch/seeders/user-seeder.service.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { Injectable, Logger } from '@nestjs/common'; -import { ElasticsearchService } from '@nestjs/elasticsearch'; -import { User } from 'src/user/entities'; -import { ELASTICSEARCH_INDICES } from '../schemas'; -import { Repository } from 'typeorm'; -import { InjectRepository } from '@nestjs/typeorm'; - -@Injectable() -export class UserSeederService { - private readonly logger = new Logger(UserSeederService.name); - private readonly BATCH_SIZE = 1000; - - constructor( - @InjectRepository(User) - private user_repository: Repository, - private readonly elasticsearch_service: ElasticsearchService - ) {} - - async seedUsers(): Promise { - this.logger.log('Starting user indexing...'); - - const total_users = await this.user_repository.count(); - this.logger.log(`Total users to index: ${total_users}`); - - let offset = 0; - let indexed = 0; - - while (offset < total_users) { - const users = await this.user_repository.find({ - skip: offset, - take: this.BATCH_SIZE, - }); - - if (users.length === 0) break; - - await this.bulkIndexUsers(users); - - indexed += users.length; - offset += this.BATCH_SIZE; - - this.logger.log(`Indexed ${indexed}/${total_users} users`); - } - - this.logger.log('User indexing completed'); - } - - private async bulkIndexUsers(users: User[]): Promise { - const operations = users.flatMap((user) => [ - { index: { _index: ELASTICSEARCH_INDICES.USERS, _id: user.id } }, - this.transformUserForES(user), - ]); - - if (operations.length === 0) return; - - try { - const result = await this.elasticsearch_service.bulk({ - refresh: false, - operations, - }); - - if (result.errors) { - this.logger.error('Bulk indexing had errors', result.items); - } - } catch (error) { - this.logger.error('Failed to bulk index users', error); - throw error; - } - } - - private transformUserForES(user: User) { - return { - user_id: user.id, - username: user.username, - name: user.name, - followers: user.followers, - following: user.following, - verified: user.verified, - bio: user.bio, - avatar_url: user.avatar_url, - }; - } -} diff --git a/src/explore/explore.controller.spec.ts b/src/explore/explore.controller.spec.ts index 126a5fb5..c9cc4094 100644 --- a/src/explore/explore.controller.spec.ts +++ b/src/explore/explore.controller.spec.ts @@ -40,11 +40,13 @@ describe('ExploreController', () => { it('should call explore_service.getExploreData with user_id', async () => { const user_id = 'user-123'; const expected_result = { - trending: [], + trending: { data: [] }, who_to_follow: [], for_you: [], }; - const spy = jest.spyOn(service, 'getExploreData').mockResolvedValue(expected_result); + const spy = jest + .spyOn(service, 'getExploreData') + .mockResolvedValue(expected_result as any); const result = await controller.getExploreData(user_id); @@ -55,10 +57,11 @@ describe('ExploreController', () => { describe('getWhoToFollow', () => { it('should call explore_service.getWhoToFollow', async () => { + const user_id = 'user-123'; const expected_result = []; const spy = jest.spyOn(service, 'getWhoToFollow').mockResolvedValue(expected_result); - const result = await controller.getWhoToFollow(); + const result = await controller.getWhoToFollow(user_id); expect(spy).toHaveBeenCalledTimes(1); expect(result).toEqual(expected_result); @@ -69,8 +72,8 @@ describe('ExploreController', () => { it('should call explore_service.getCategoryTrending with correct parameters', async () => { const category_id = '21'; const user_id = 'user-123'; - const page = 1; - const limit = 20; + const page = '1'; + const limit = '20'; const expected_result = { category: { id: 21, name: 'Sports' }, tweets: [], @@ -87,7 +90,7 @@ describe('ExploreController', () => { limit ); - expect(spy).toHaveBeenCalledWith(category_id, user_id, page, limit); + expect(spy).toHaveBeenCalledWith(category_id, user_id, 1, 20); expect(result).toEqual(expected_result); }); @@ -97,12 +100,73 @@ describe('ExploreController', () => { const spy = jest.spyOn(service, 'getCategoryTrending').mockResolvedValue({ category: null, tweets: [], - pagination: { page: 1, hasMore: false }, - }); + page: 1, + limit: 20, + hasMore: false, + } as any); await controller.getCategoryWiseTrending(category_id, user_id); expect(spy).toHaveBeenCalledWith(category_id, user_id, 1, 20); }); + + it('should parse string page and limit to numbers', async () => { + const category_id = '5'; + const user_id = 'user-456'; + const page = '3'; + const limit = '15'; + const spy = jest.spyOn(service, 'getCategoryTrending').mockResolvedValue({ + category: { id: 5, name: 'Technology' }, + tweets: [], + pagination: { page: 3, hasMore: true }, + }); + + await controller.getCategoryWiseTrending(category_id, user_id, page, limit); + + expect(spy).toHaveBeenCalledWith(category_id, user_id, 3, 15); + }); + + it('should work without user_id', async () => { + const category_id = '10'; + const spy = jest.spyOn(service, 'getCategoryTrending').mockResolvedValue({ + category: { id: 10, name: 'Entertainment' }, + tweets: [], + pagination: { page: 1, hasMore: false }, + } as any); + + await controller.getCategoryWiseTrending(category_id, '' as any); + + expect(spy).toHaveBeenCalledWith(category_id, '', 1, 20); + }); + + it('should handle custom page without limit', async () => { + const category_id = '7'; + const user_id = 'user-789'; + const page = '2'; + const spy = jest.spyOn(service, 'getCategoryTrending').mockResolvedValue({ + category: { id: 7, name: 'Sports' }, + tweets: [], + pagination: { page: 2, hasMore: false }, + }); + + await controller.getCategoryWiseTrending(category_id, user_id, page); + + expect(spy).toHaveBeenCalledWith(category_id, user_id, 2, 20); + }); + + it('should handle custom limit without page', async () => { + const category_id = '12'; + const user_id = 'user-101'; + const limit = '10'; + const spy = jest.spyOn(service, 'getCategoryTrending').mockResolvedValue({ + category: { id: 12, name: 'News' }, + tweets: [], + pagination: { page: 1, hasMore: false }, + }); + + await controller.getCategoryWiseTrending(category_id, user_id, undefined, limit); + + expect(spy).toHaveBeenCalledWith(category_id, user_id, 1, 10); + }); }); }); diff --git a/src/explore/explore.controller.ts b/src/explore/explore.controller.ts index 363b2aa5..b48345de 100644 --- a/src/explore/explore.controller.ts +++ b/src/explore/explore.controller.ts @@ -31,6 +31,7 @@ import { JwtStrategy } from 'src/auth/strategies/jwt.strategy'; @ApiBearerAuth('JWT-auth') @Controller('explore') export class ExploreController { + /* istanbul ignore next */ constructor(private readonly explore_service: ExploreService) {} @ApiOperation(explore_root_swagger.operation) @@ -69,8 +70,8 @@ export class ExploreController { @Query('page') page?: string, @Query('limit') limit?: string ) { - const parsed_page = page ? parseInt(page, 10) : 1; - const parsed_limit = limit ? parseInt(limit, 10) : 20; + const parsed_page = page ? Number.parseInt(page, 10) : 1; + const parsed_limit = limit ? Number.parseInt(limit, 10) : 20; return await this.explore_service.getCategoryTrending( category_id, user_id, diff --git a/src/explore/explore.module.ts b/src/explore/explore.module.ts index 725da39c..5e424084 100644 --- a/src/explore/explore.module.ts +++ b/src/explore/explore.module.ts @@ -2,6 +2,7 @@ import { Module } from '@nestjs/common'; import { TypeOrmModule } from '@nestjs/typeorm'; import { ExploreController } from './explore.controller'; import { ExploreService } from './explore.service'; +import { WhoToFollowService } from './who-to-follow.service'; import { RedisModuleConfig } from '../redis/redis.module'; import { Category } from '../category/entities/category.entity'; import { TweetsModule } from '../tweets/tweets.module'; @@ -18,7 +19,7 @@ import { TrendModule } from 'src/trend/trend.module'; TrendModule, ], controllers: [ExploreController], - providers: [ExploreService], - exports: [ExploreService], + providers: [ExploreService, WhoToFollowService], + exports: [ExploreService, WhoToFollowService], }) export class ExploreModule {} diff --git a/src/explore/explore.service.spec.ts b/src/explore/explore.service.spec.ts index 4228fefc..746eb0d1 100644 --- a/src/explore/explore.service.spec.ts +++ b/src/explore/explore.service.spec.ts @@ -6,6 +6,7 @@ import { Category } from '../category/entities/category.entity'; import { UserInterests } from '../user/entities/user-interests.entity'; import { TweetsService } from '../tweets/tweets.service'; import { TrendService } from '../trend/trend.service'; +import { WhoToFollowService } from './who-to-follow.service'; import { getRepositoryToken } from '@nestjs/typeorm'; import { UserRepository } from '../user/user.repository'; @@ -17,6 +18,7 @@ describe('ExploreService', () => { let user_repository: UserRepository; let tweets_service: TweetsService; let trend_service: TrendService; + let who_to_follow_service: WhoToFollowService; const mock_redis_service = { zrevrange: jest.fn(), @@ -28,6 +30,17 @@ describe('ExploreService', () => { find: jest.fn(), }; + const mock_category_query_builder = { + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([]), + }; + + // Ensure category repository supports createQueryBuilder in tests + mock_category_repository['createQueryBuilder'] = jest.fn(() => mock_category_query_builder); + const mock_user_interests_repository = { createQueryBuilder: jest.fn(), }; @@ -51,6 +64,10 @@ describe('ExploreService', () => { getTrending: jest.fn(), }; + const mock_who_to_follow_service = { + getWhoToFollow: jest.fn(), + }; + beforeEach(async () => { const module: TestingModule = await Test.createTestingModule({ providers: [ @@ -64,6 +81,7 @@ describe('ExploreService', () => { { provide: UserRepository, useValue: mock_user_repository }, { provide: TweetsService, useValue: mock_tweets_service }, { provide: TrendService, useValue: mock_trend_service }, + { provide: WhoToFollowService, useValue: mock_who_to_follow_service }, ], }).compile(); @@ -76,6 +94,7 @@ describe('ExploreService', () => { user_repository = module.get(UserRepository); tweets_service = module.get(TweetsService); trend_service = module.get(TrendService); + who_to_follow_service = module.get(WhoToFollowService); }); afterEach(() => { @@ -89,11 +108,13 @@ describe('ExploreService', () => { describe('getExploreData', () => { it('should return trending, who to follow, and for you posts', async () => { const mock_trending = ['topic1', 'topic2']; - const mock_who_to_follow = []; - const mock_for_you = [{ category: { id: 1 }, tweets: [] }]; + const mock_who_to_follow: any[] = []; + const mock_for_you = [{ category: { id: 1 }, tweets: [] as any[] }]; jest.spyOn(trend_service, 'getTrending').mockResolvedValue(mock_trending as any); - jest.spyOn(service, 'getWhoToFollow').mockResolvedValue(mock_who_to_follow); + jest.spyOn(who_to_follow_service, 'getWhoToFollow').mockResolvedValue( + mock_who_to_follow + ); jest.spyOn(service, 'getForYouPosts').mockResolvedValue(mock_for_you as any); const result = await service.getExploreData('user-123'); @@ -104,62 +125,53 @@ describe('ExploreService', () => { for_you: mock_for_you, }); expect(trend_service.getTrending).toHaveBeenCalledWith('global', 5); - expect(service.getWhoToFollow).toHaveBeenCalledWith('user-123', 3); + expect(who_to_follow_service.getWhoToFollow).toHaveBeenCalledWith('user-123', 30); expect(service.getForYouPosts).toHaveBeenCalledWith('user-123'); }); it('should work without current user id', async () => { - jest.spyOn(trend_service, 'getTrending').mockResolvedValue([]); - jest.spyOn(service, 'getWhoToFollow').mockResolvedValue([]); + jest.spyOn(trend_service, 'getTrending').mockResolvedValue([] as any); + jest.spyOn(who_to_follow_service, 'getWhoToFollow').mockResolvedValue([]); jest.spyOn(service, 'getForYouPosts').mockResolvedValue([]); const result = await service.getExploreData(); expect(result).toBeDefined(); - expect(service.getWhoToFollow).toHaveBeenCalledWith(undefined, 3); + expect(who_to_follow_service.getWhoToFollow).toHaveBeenCalledWith(undefined, 30); expect(service.getForYouPosts).toHaveBeenCalledWith(undefined); }); }); describe('getWhoToFollow', () => { it('should return 30 random users with relationships when user is logged in', async () => { - const mock_users = [ + const mock_result = [ { - user_id: 'user-1', - user_username: 'john_doe', - user_name: 'John Doe', - user_bio: 'Software Engineer', - user_avatar_url: 'https://example.com/avatar1.jpg', - user_verified: true, - user_followers: 100, - user_following: 50, + id: 'user-1', + username: 'john_doe', + name: 'John Doe', + bio: 'Software Engineer', + avatar_url: 'https://example.com/avatar1.jpg', + verified: true, + followers: 100, + following: 50, is_following: true, is_followed: false, }, { - user_id: 'user-2', - user_username: 'jane_smith', - user_name: 'Jane Smith', - user_bio: 'Designer', - user_avatar_url: 'https://example.com/avatar2.jpg', - user_verified: false, - user_followers: 200, - user_following: 150, + id: 'user-2', + username: 'jane_smith', + name: 'Jane Smith', + bio: 'Designer', + avatar_url: 'https://example.com/avatar2.jpg', + verified: false, + followers: 200, + following: 150, is_following: false, is_followed: true, }, ]; - const mock_query_builder = { - select: jest.fn().mockReturnThis(), - orderBy: jest.fn().mockReturnThis(), - limit: jest.fn().mockReturnThis(), - addSelect: jest.fn().mockReturnThis(), - setParameter: jest.fn().mockReturnThis(), - getRawMany: jest.fn().mockResolvedValue(mock_users), - }; - - mock_user_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + mock_who_to_follow_service.getWhoToFollow.mockResolvedValue(mock_result); const result = await service.getWhoToFollow('current-user-id'); @@ -176,37 +188,29 @@ describe('ExploreService', () => { is_following: true, is_followed: false, }); - expect(mock_query_builder.addSelect).toHaveBeenCalled(); - expect(mock_query_builder.setParameter).toHaveBeenCalledWith( - 'current_user_id', - 'current-user-id' + expect(mock_who_to_follow_service.getWhoToFollow).toHaveBeenCalledWith( + 'current-user-id', + 30 ); }); it('should return users without relationship data when no user is logged in', async () => { - const mock_users = [ + const mock_result = [ { - user_id: 'user-1', - user_username: 'john_doe', - user_name: 'John Doe', - user_bio: 'Software Engineer', - user_avatar_url: 'https://example.com/avatar1.jpg', - user_verified: true, - user_followers: 100, - user_following: 50, + id: 'user-1', + username: 'john_doe', + name: 'John Doe', + bio: 'Software Engineer', + avatar_url: 'https://example.com/avatar1.jpg', + verified: true, + followers: 100, + following: 50, + is_following: false, + is_followed: false, }, ]; - const mock_query_builder = { - select: jest.fn().mockReturnThis(), - orderBy: jest.fn().mockReturnThis(), - limit: jest.fn().mockReturnThis(), - addSelect: jest.fn().mockReturnThis(), - setParameter: jest.fn().mockReturnThis(), - getRawMany: jest.fn().mockResolvedValue(mock_users), - }; - - mock_user_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + mock_who_to_follow_service.getWhoToFollow.mockResolvedValue(mock_result); const result = await service.getWhoToFollow(); @@ -223,33 +227,26 @@ describe('ExploreService', () => { is_following: false, is_followed: false, }); - expect(mock_query_builder.addSelect).not.toHaveBeenCalled(); + expect(mock_who_to_follow_service.getWhoToFollow).toHaveBeenCalledWith(undefined, 30); }); it('should handle users with null values', async () => { - const mock_users = [ + const mock_result = [ { - user_id: 'user-1', - user_username: 'john_doe', - user_name: 'John Doe', - user_bio: null, - user_avatar_url: null, - user_verified: null, - user_followers: null, - user_following: null, + id: 'user-1', + username: 'john_doe', + name: 'John Doe', + bio: '', + avatar_url: '', + verified: false, + followers: 0, + following: 0, + is_following: false, + is_followed: false, }, ]; - const mock_query_builder = { - select: jest.fn().mockReturnThis(), - orderBy: jest.fn().mockReturnThis(), - limit: jest.fn().mockReturnThis(), - addSelect: jest.fn().mockReturnThis(), - setParameter: jest.fn().mockReturnThis(), - getRawMany: jest.fn().mockResolvedValue(mock_users), - }; - - mock_user_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + mock_who_to_follow_service.getWhoToFollow.mockResolvedValue(mock_result); const result = await service.getWhoToFollow(); @@ -409,10 +406,13 @@ describe('ExploreService', () => { it('should use default categories when user has no interests', async () => { const mock_default_cats = [ - { id: 21, name: 'Sports' }, - { id: 20, name: 'Tech' }, + { id: 2, name: 'Category2' }, + { id: 3, name: 'Category3' }, + { id: 5, name: 'Category5' }, + { id: 4, name: 'Category4' }, + { id: 15, name: 'Category15' }, ]; - const mock_tweet_ids = [['tweet-1'], ['tweet-2']]; + const mock_tweet_ids = [['tweet-1'], ['tweet-2'], [], [], []]; const mock_tweets = [ { tweet_id: 'tweet-1', content: 'test1' }, { tweet_id: 'tweet-2', content: 'test2' }, @@ -427,27 +427,121 @@ describe('ExploreService', () => { }; mock_user_interests_repository.createQueryBuilder.mockReturnValue(mock_query_builder); - mock_category_repository.find.mockResolvedValue(mock_default_cats); + mock_category_query_builder.getMany.mockResolvedValue(mock_default_cats); mock_redis_service.zrevrangeMultiple.mockResolvedValue(mock_tweet_ids); mock_tweets_service.getTweetsByIds.mockResolvedValue(mock_tweets); const result = await service.getForYouPosts('user-456'); - expect(mock_category_repository.find).toHaveBeenCalled(); + expect(mock_category_query_builder.getMany).toHaveBeenCalled(); + expect(mock_category_query_builder.where).toHaveBeenCalledWith('c.id IN (:...ids)', { + ids: [2, 3, 5, 4, 15], + }); expect(result).toHaveLength(2); }); + it('should fill remaining slots with default categories when user has partial interests', async () => { + const user_id = 'user-789'; + const mock_interests = [ + { category: { id: 21, name: 'Sports' }, score: 100 }, + { category: { id: 20, name: 'Tech' }, score: 90 }, + ]; + const mock_default_cats = [ + { id: 2, name: 'Category2' }, + { id: 3, name: 'Category3' }, + { id: 5, name: 'Category5' }, + ]; + const mock_tweet_ids = [ + ['tweet-1'], + ['tweet-2'], + ['tweet-3'], + ['tweet-4'], + ['tweet-5'], + ]; + const mock_tweets = [ + { tweet_id: 'tweet-1', content: 'test1' }, + { tweet_id: 'tweet-2', content: 'test2' }, + { tweet_id: 'tweet-3', content: 'test3' }, + { tweet_id: 'tweet-4', content: 'test4' }, + { tweet_id: 'tweet-5', content: 'test5' }, + ]; + + const mock_query_builder = { + innerJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_interests), + }; + + mock_user_interests_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + mock_category_query_builder.getMany.mockResolvedValue(mock_default_cats); + mock_category_query_builder.andWhere.mockReturnThis(); + mock_redis_service.zrevrangeMultiple.mockResolvedValue(mock_tweet_ids); + mock_tweets_service.getTweetsByIds.mockResolvedValue(mock_tweets); + + const result = await service.getForYouPosts(user_id); + + // Should call andWhere because existing_ids.length > 0 + expect(mock_category_query_builder.andWhere).toHaveBeenCalledWith( + 'c.id NOT IN (:...existing_ids)', + { existing_ids: [21, 20] } + ); + expect(mock_category_query_builder.limit).toHaveBeenCalledWith(3); // needed = 5 - 2 + expect(result.length).toBeGreaterThan(0); + }); + + it('should NOT call andWhere when user has zero interests (existing_ids.length === 0)', async () => { + const user_id = 'user-no-interests'; + const mock_default_cats = [ + { id: 2, name: 'Category2' }, + { id: 3, name: 'Category3' }, + { id: 5, name: 'Category5' }, + { id: 4, name: 'Category4' }, + { id: 15, name: 'Category15' }, + ]; + const mock_tweet_ids = [['tweet-1'], [], [], [], []]; + const mock_tweets = [{ tweet_id: 'tweet-1', content: 'test1' }]; + + const mock_query_builder = { + innerJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([]), + }; + + mock_user_interests_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + mock_category_query_builder.andWhere.mockClear(); + mock_category_query_builder.getMany.mockResolvedValue(mock_default_cats); + mock_redis_service.zrevrangeMultiple.mockResolvedValue(mock_tweet_ids); + mock_tweets_service.getTweetsByIds.mockResolvedValue(mock_tweets); + + await service.getForYouPosts(user_id); + + // andWhere should NOT be called because existing_ids.length === 0 + expect(mock_category_query_builder.andWhere).not.toHaveBeenCalled(); + expect(mock_category_query_builder.limit).toHaveBeenCalledWith(5); // needed = 5 - 0 + }); + it('should use default categories when no user_id provided', async () => { - const mock_default_cats = [{ id: 21, name: 'Sports' }]; - const mock_tweet_ids = [['tweet-1']]; + const mock_default_cats = [ + { id: 2, name: 'Category2' }, + { id: 3, name: 'Category3' }, + { id: 5, name: 'Category5' }, + { id: 4, name: 'Category4' }, + { id: 15, name: 'Category15' }, + ]; + const mock_tweet_ids = [['tweet-1'], [], [], [], []]; - mock_category_repository.find.mockResolvedValue(mock_default_cats); + mock_category_query_builder.getMany.mockResolvedValue(mock_default_cats); mock_redis_service.zrevrangeMultiple.mockResolvedValue(mock_tweet_ids); mock_tweets_service.getTweetsByIds.mockResolvedValue([{ tweet_id: 'tweet-1' }]); const result = await service.getForYouPosts(); - expect(mock_category_repository.find).toHaveBeenCalled(); + expect(mock_category_query_builder.getMany).toHaveBeenCalled(); + expect(mock_category_query_builder.andWhere).not.toHaveBeenCalled(); }); it('should return empty array when no tweets found', async () => { @@ -461,6 +555,7 @@ describe('ExploreService', () => { mock_user_interests_repository.createQueryBuilder.mockReturnValue(mock_query_builder); mock_category_repository.find.mockResolvedValue([{ id: 21, name: 'Sports' }]); + mock_category_query_builder.getMany.mockResolvedValue([{ id: 21, name: 'Sports' }]); mock_redis_service.zrevrangeMultiple.mockResolvedValue([[]]); const result = await service.getForYouPosts('user-123'); @@ -492,5 +587,89 @@ describe('ExploreService', () => { expect(result).toHaveLength(1); expect(result[0].category.id).toBe(21); }); + + it('should handle user with exactly 5 interests (no default categories needed)', async () => { + const user_id = 'user-full-interests'; + const mock_interests = [ + { category: { id: 21, name: 'Sports' }, score: 100 }, + { category: { id: 20, name: 'Tech' }, score: 90 }, + { category: { id: 19, name: 'Music' }, score: 80 }, + { category: { id: 18, name: 'Gaming' }, score: 70 }, + { category: { id: 17, name: 'News' }, score: 60 }, + ]; + const mock_tweet_ids = [ + ['tweet-1'], + ['tweet-2'], + ['tweet-3'], + ['tweet-4'], + ['tweet-5'], + ]; + const mock_tweets = [ + { tweet_id: 'tweet-1', content: 'test1' }, + { tweet_id: 'tweet-2', content: 'test2' }, + { tweet_id: 'tweet-3', content: 'test3' }, + { tweet_id: 'tweet-4', content: 'test4' }, + { tweet_id: 'tweet-5', content: 'test5' }, + ]; + + const mock_query_builder = { + innerJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_interests), + }; + + mock_user_interests_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + mock_redis_service.zrevrangeMultiple.mockResolvedValue(mock_tweet_ids); + mock_tweets_service.getTweetsByIds.mockResolvedValue(mock_tweets); + + const result = await service.getForYouPosts(user_id); + + // Should NOT call category_repository because categories.length === 5 + expect(mock_category_query_builder.getMany).not.toHaveBeenCalled(); + expect(result).toHaveLength(5); + }); + + it('should handle multiple tweets in feed_structure correctly', async () => { + const user_id = 'user-multi-tweets'; + const mock_interests = [ + { category: { id: 21, name: 'Sports' }, score: 100 }, + { category: { id: 20, name: 'Tech' }, score: 90 }, + ]; + const mock_tweet_ids = [ + ['tweet-1', 'tweet-2', 'tweet-3'], + ['tweet-4', 'tweet-5'], + ]; + const mock_tweets = [ + { tweet_id: 'tweet-1', content: 'test1' }, + { tweet_id: 'tweet-2', content: 'test2' }, + { tweet_id: 'tweet-3', content: 'test3' }, + { tweet_id: 'tweet-4', content: 'test4' }, + { tweet_id: 'tweet-5', content: 'test5' }, + ]; + + const mock_query_builder = { + innerJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_interests), + }; + + mock_user_interests_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + mock_redis_service.zrevrangeMultiple.mockResolvedValue(mock_tweet_ids); + mock_tweets_service.getTweetsByIds.mockResolvedValue(mock_tweets); + + const result = await service.getForYouPosts(user_id); + + expect(result).toHaveLength(2); + expect(result[0].tweets).toHaveLength(3); + expect(result[1].tweets).toHaveLength(2); + expect(mock_tweets_service.getTweetsByIds).toHaveBeenCalledWith( + expect.arrayContaining(['tweet-1', 'tweet-2', 'tweet-3', 'tweet-4', 'tweet-5']), + user_id + ); + }); }); }); diff --git a/src/explore/explore.service.ts b/src/explore/explore.service.ts index f0db66a6..8dd6ae61 100644 --- a/src/explore/explore.service.ts +++ b/src/explore/explore.service.ts @@ -4,24 +4,26 @@ import { In, Repository } from 'typeorm'; import { RedisService } from '../redis/redis.service'; import { Category } from '../category/entities/category.entity'; import { TweetsService } from '../tweets/tweets.service'; -import { UserRepository } from '../user/user.repository'; import { UserInterests } from 'src/user/entities/user-interests.entity'; import { TrendService } from '../trend/trend.service'; +import { WhoToFollowService } from './who-to-follow.service'; @Injectable() export class ExploreService { + /* c8 ignore start */ constructor( private readonly redis_service: RedisService, @InjectRepository(Category) private readonly category_repository: Repository, @InjectRepository(UserInterests) private readonly user_interests_repository: Repository, - private readonly user_repository: UserRepository, private readonly tweets_service: TweetsService, - private readonly trend_service: TrendService + private readonly trend_service: TrendService, + private readonly who_to_follow_service: WhoToFollowService ) {} + /* c8 ignore stop */ - private readonly DEFAULT_CATEGORIES = [21, 20, 3, 4, 5]; + private readonly DEFAULT_CATEGORIES = [2, 3, 5, 4, 15]; async getExploreData(current_user_id?: string) { // This method would fetch all explore data in one go @@ -29,7 +31,7 @@ export class ExploreService { const [trending, who_to_follow, for_you] = await Promise.all([ this.trend_service.getTrending('global', 5), - this.getWhoToFollow(current_user_id, 3), + this.who_to_follow_service.getWhoToFollow(current_user_id, 30), this.getForYouPosts(current_user_id), ]); @@ -41,54 +43,7 @@ export class ExploreService { } async getWhoToFollow(current_user_id?: string, limit: number = 30) { - const query = this.user_repository - .createQueryBuilder('user') - .select([ - 'user.id', - 'user.username', - 'user.name', - 'user.bio', - 'user.avatar_url', - 'user.verified', - 'user.followers', - 'user.following', - ]) - .orderBy('RANDOM()') - .limit(limit); - - if (current_user_id) { - query - .addSelect( - `EXISTS( - SELECT 1 FROM user_follows uf - WHERE uf.follower_id = :current_user_id AND uf.followed_id = "user"."id" - )`, - 'is_following' - ) - .addSelect( - `EXISTS( - SELECT 1 FROM user_follows uf - WHERE uf.follower_id = "user"."id" AND uf.followed_id = :current_user_id - )`, - 'is_followed' - ) - .setParameter('current_user_id', current_user_id); - } - - const users = await query.getRawMany(); - - return users.map((user) => ({ - id: user.user_id, - username: user.user_username, - name: user.user_name, - bio: user.user_bio || '', - avatar_url: user.user_avatar_url || '', - verified: user.user_verified || false, - followers: user.user_followers || 0, - following: user.user_following || 0, - is_following: user.is_following || false, - is_followed: user.is_followed || false, - })); + return this.who_to_follow_service.getWhoToFollow(current_user_id, limit); } async getCategoryTrending( @@ -98,7 +53,7 @@ export class ExploreService { limit: number = 20 ) { const category = await this.category_repository.findOne({ - where: { id: parseInt(category_id) }, + where: { id: Number.parseInt(category_id) }, }); if (!category) { @@ -166,12 +121,23 @@ export class ExploreService { console.log('Time taken to fetch user interests:', time_after - time_before, 'ms'); const categories = user_interests.map((interest) => interest.category); - if (categories.length === 0) { - // If no user interests, use default categories - const default_cats = await this.category_repository.find({ - where: { id: In(this.DEFAULT_CATEGORIES) }, - }); - categories.push(...default_cats); + + if (categories.length < 5) { + // Fill remaining slots with default categories + const existing_ids = categories.map((cat) => cat.id); + const needed = 5 - categories.length; + const qb = this.category_repository + .createQueryBuilder('c') + .where('c.id IN (:...ids)', { ids: this.DEFAULT_CATEGORIES }) + .orderBy('c.id', 'ASC') + .limit(needed); + + if (existing_ids.length > 0) { + qb.andWhere('c.id NOT IN (:...existing_ids)', { existing_ids }); + } + + const filler_cats = await qb.getMany(); + categories.push(...filler_cats); } const keys = categories.map((cat) => `explore:category:${cat.id}`); const results = await this.redis_service.zrevrangeMultiple(keys, 0, 4); @@ -191,6 +157,7 @@ export class ExploreService { all_tweet_ids.add(tweet_id); }); + /* istanbul ignore next */ if (tweets.length > 0) { feed_structure.push({ category: categories[index].name, diff --git a/src/explore/who-to-follow.service.spec.ts b/src/explore/who-to-follow.service.spec.ts new file mode 100644 index 00000000..7bb43161 --- /dev/null +++ b/src/explore/who-to-follow.service.spec.ts @@ -0,0 +1,749 @@ +import { Test, TestingModule } from '@nestjs/testing'; +import { WhoToFollowService } from './who-to-follow.service'; +import { UserRepository } from '../user/user.repository'; + +describe('WhoToFollowService', () => { + let service: WhoToFollowService; + let user_repository: UserRepository; + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + WhoToFollowService, + { + provide: UserRepository, + useValue: { + createQueryBuilder: jest.fn(), + query: jest.fn(), + }, + }, + ], + }).compile(); + + service = module.get(WhoToFollowService); + user_repository = module.get(UserRepository); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should be defined', () => { + expect(service).toBeDefined(); + }); + + describe('getWhoToFollow', () => { + it('should return popular users for non-authenticated users', async () => { + const mock_users = [ + { + id: 'user-1', + username: 'user1', + name: 'User 1', + bio: 'Bio 1', + avatar_url: 'avatar1.jpg', + verified: true, + followers: 1000, + following: 100, + }, + { + id: 'user-2', + username: 'user2', + name: 'User 2', + bio: 'Bio 2', + avatar_url: 'avatar2.jpg', + verified: false, + followers: 500, + following: 50, + }, + ]; + + const mock_query_builder = { + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_users), + }; + + jest.spyOn(user_repository, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + + const result = await service.getWhoToFollow(undefined, 2); + + expect(result).toHaveLength(2); + expect(result[0].id).toBe('user-1'); + expect(result[0].username).toBe('user1'); + expect(result[0].is_following).toBe(false); + expect(result[0].is_followed).toBe(false); + expect(result[1].id).toBe('user-2'); + }); + + it('should handle null/undefined user fields in popular users', async () => { + const mock_users = [ + { + id: 'user-1', + username: 'user1', + name: 'User 1', + bio: null, + avatar_url: null, + verified: null, + followers: null, + following: null, + }, + ]; + + const mock_query_builder = { + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_users), + }; + + jest.spyOn(user_repository, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + + const result = await service.getWhoToFollow(undefined, 1); + + expect(result).toHaveLength(1); + expect(result[0].bio).toBe(''); + expect(result[0].avatar_url).toBe(''); + expect(result[0].verified).toBe(false); + expect(result[0].followers).toBe(0); + expect(result[0].following).toBe(0); + }); + + it('should return personalized recommendations for authenticated users', async () => { + const user_id = 'current-user-123'; + + // Mock query responses for all 5 sources + const mock_fof_users = [{ user_id: 'fof-1', mutual_count: 5 }]; + const mock_interest_users = [ + { user_id: 'interest-1', common_categories: 3, avg_interest_score: 80 }, + ]; + const mock_liked_users = [{ user_id: 'liked-1', like_count: 10 }]; + const mock_replied_users = [{ user_id: 'replied-1', reply_count: 3 }]; + const mock_followers_users = [{ user_id: 'follower-1' }]; + + jest.spyOn(user_repository, 'query') + .mockResolvedValueOnce(mock_fof_users) + .mockResolvedValueOnce(mock_interest_users) + .mockResolvedValueOnce(mock_liked_users) + .mockResolvedValueOnce(mock_replied_users) + .mockResolvedValueOnce(mock_followers_users); + + const mock_final_users = [ + { + user_id: 'fof-1', + user_username: 'fofuser', + user_name: 'FoF User', + user_bio: 'Bio', + user_avatar_url: 'avatar.jpg', + user_verified: false, + user_followers: 100, + user_following: 50, + is_following: false, + is_followed: false, + }, + { + user_id: 'interest-1', + user_username: 'interestuser', + user_name: 'Interest User', + user_bio: 'Bio', + user_avatar_url: 'avatar.jpg', + user_verified: false, + user_followers: 100, + user_following: 50, + is_following: false, + is_followed: false, + }, + ]; + + const mock_query_builder = { + select: jest.fn().mockReturnThis(), + addSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + setParameter: jest.fn().mockReturnThis(), + getRawMany: jest.fn().mockResolvedValue(mock_final_users), + getMany: jest.fn().mockResolvedValue([]), + }; + + jest.spyOn(user_repository, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + + const result = await service.getWhoToFollow(user_id, 10); + + expect(Array.isArray(result)).toBe(true); + expect(result.length).toBeGreaterThan(0); + expect(user_repository.query).toHaveBeenCalledTimes(5); // 5 sources + }); + + it('should exclude followed users from popular users backfill', async () => { + const user_id = 'current-user-123'; + + // Mock minimal responses from all sources (only 1 user) + jest.spyOn(user_repository, 'query') + .mockResolvedValueOnce([{ user_id: 'user-1', mutual_count: 1 }]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]); + + const mock_recommended_users = [ + { + user_id: 'user-1', + user_username: 'user1', + user_name: 'User 1', + user_bio: '', + user_avatar_url: '', + user_verified: false, + user_followers: 10, + user_following: 5, + is_following: false, + is_followed: false, + }, + ]; + + const mock_popular_users = [ + { + id: 'popular-1', + username: 'popular1', + name: 'Popular User 1', + bio: '', + avatar_url: '', + verified: true, + followers: 10000, + following: 100, + }, + { + id: 'popular-2', + username: 'popular2', + name: 'Popular User 2', + bio: '', + avatar_url: '', + verified: false, + followers: 5000, + following: 200, + }, + ]; + + const mock_query_builder = { + select: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + addSelect: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + setParameter: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getRawMany: jest.fn().mockResolvedValue(mock_recommended_users), + getMany: jest.fn().mockResolvedValue(mock_popular_users), + }; + + jest.spyOn(user_repository, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + + const result = await service.getWhoToFollow(user_id, 5); + + // Verify that andWhere was called to filter out followed users + expect(mock_query_builder.andWhere).toHaveBeenCalledWith( + 'user.id != :current_user_id', + { current_user_id: user_id } + ); + + // Verify that andWhere was called to exclude followed users + const and_where_calls = mock_query_builder.andWhere.mock.calls; + expect(and_where_calls.length).toBeGreaterThan(1); + const follows_filter_call = and_where_calls.find((call: any[]) => + call[0].includes('user_follows') + ); + expect(follows_filter_call).toBeDefined(); + }); + + it('should backfill with popular users if recommendations are insufficient', async () => { + const user_id = 'current-user-123'; + + // Mock minimal responses from all sources (only 2 users) + jest.spyOn(user_repository, 'query') + .mockResolvedValueOnce([{ user_id: 'user-1', mutual_count: 1 }]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([{ user_id: 'user-2' }]); + + const mock_recommended_users = [ + { + user_id: 'user-1', + user_username: 'user1', + user_name: 'User 1', + user_bio: '', + user_avatar_url: '', + user_verified: false, + user_followers: 10, + user_following: 5, + is_following: false, + is_followed: false, + }, + { + user_id: 'user-2', + user_username: 'user2', + user_name: 'User 2', + user_bio: '', + user_avatar_url: '', + user_verified: false, + user_followers: 20, + user_following: 10, + is_following: false, + is_followed: false, + }, + ]; + + const mock_popular_users = [ + { + id: 'popular-1', + username: 'popular1', + name: 'Popular User 1', + bio: '', + avatar_url: '', + verified: true, + followers: 10000, + following: 100, + }, + { + id: 'popular-2', + username: 'popular2', + name: 'Popular User 2', + bio: '', + avatar_url: '', + verified: false, + followers: 5000, + following: 200, + }, + ]; + + const mock_query_builder = { + select: jest.fn().mockReturnThis(), + addSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + setParameter: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getRawMany: jest.fn().mockResolvedValue(mock_recommended_users), + getMany: jest.fn().mockResolvedValue(mock_popular_users), + }; + + jest.spyOn(user_repository, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + + const result = await service.getWhoToFollow(user_id, 5); + + expect(result.length).toBe(4); // 2 from recommendations + 2 from popular + }); + + it('should handle null/undefined fields in personalized recommendations', async () => { + const user_id = 'current-user-test'; + + jest.spyOn(user_repository, 'query') + .mockResolvedValueOnce([{ user_id: 'user-1', mutual_count: 5 }]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]); + + const mock_users_with_nulls = [ + { + user_id: 'user-1', + user_username: 'user1', + user_name: 'User 1', + user_bio: null, + user_avatar_url: null, + user_verified: null, + user_followers: null, + user_following: null, + is_following: null, + is_followed: null, + }, + ]; + + const mock_query_builder = { + select: jest.fn().mockReturnThis(), + addSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + setParameter: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getRawMany: jest.fn().mockResolvedValue(mock_users_with_nulls), + getMany: jest.fn().mockResolvedValue([]), + }; + + jest.spyOn(user_repository, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + + const result = await service.getWhoToFollow(user_id, 10); + + expect(result).toHaveLength(1); + expect(result[0].bio).toBe(''); + expect(result[0].avatar_url).toBe(''); + expect(result[0].verified).toBe(false); + expect(result[0].followers).toBe(0); + expect(result[0].following).toBe(0); + expect(result[0].is_following).toBe(false); + expect(result[0].is_followed).toBe(false); + }); + }); + + describe('Distribution Logic', () => { + it('should correctly distribute users according to percentages', async () => { + const user_id = 'current-user-123'; + + // Mock responses with enough users from each source + const mock_fof_users = Array.from({ length: 20 }, (_, i) => ({ + user_id: `fof-${i}`, + mutual_count: 5, + })); + const mock_interest_users = Array.from({ length: 10 }, (_, i) => ({ + user_id: `interest-${i}`, + common_categories: 3, + avg_interest_score: 80, + })); + const mock_liked_users = Array.from({ length: 15 }, (_, i) => ({ + user_id: `liked-${i}`, + like_count: 10, + })); + const mock_replied_users = Array.from({ length: 10 }, (_, i) => ({ + user_id: `replied-${i}`, + reply_count: 3, + })); + const mock_followers_users = Array.from({ length: 10 }, (_, i) => ({ + user_id: `follower-${i}`, + })); + + jest.spyOn(user_repository, 'query') + .mockResolvedValueOnce(mock_fof_users) + .mockResolvedValueOnce(mock_interest_users) + .mockResolvedValueOnce(mock_liked_users) + .mockResolvedValueOnce(mock_replied_users) + .mockResolvedValueOnce(mock_followers_users); + + // Create mock final users for all user IDs + const all_user_ids = [ + ...mock_fof_users.map((u) => u.user_id), + ...mock_interest_users.map((u) => u.user_id), + ...mock_liked_users.map((u) => u.user_id), + ...mock_replied_users.map((u) => u.user_id), + ...mock_followers_users.map((u) => u.user_id), + ]; + + const mock_final_users = all_user_ids.map((id) => ({ + user_id: id, + user_username: `user_${id}`, + user_name: `User ${id}`, + user_bio: '', + user_avatar_url: '', + user_verified: false, + user_followers: 100, + user_following: 50, + is_following: false, + is_followed: false, + })); + + const mock_query_builder = { + select: jest.fn().mockReturnThis(), + addSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + setParameter: jest.fn().mockReturnThis(), + getRawMany: jest.fn().mockResolvedValue(mock_final_users), + }; + + jest.spyOn(user_repository, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + + const limit = 10; + const result = await service.getWhoToFollow(user_id, limit); + + // Verify correct number returned + expect(result.length).toBe(limit); + + // Distribution should be: 35% FoF (4), 15% interests (2), 20% likes (2), 15% replies (2), 15% followers (2) + // Note: Due to rounding and deduplication, exact counts may vary slightly + expect(result.length).toBeLessThanOrEqual(limit); + }); + }); + + describe('Edge Cases', () => { + it('should handle empty results from all sources', async () => { + const user_id = 'current-user-123'; + + // Mock empty responses from all sources + jest.spyOn(user_repository, 'query') + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]); + + const mock_query_builder = { + select: jest.fn().mockReturnThis(), + addSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + setParameter: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getRawMany: jest.fn().mockResolvedValue([]), + getMany: jest.fn().mockResolvedValue([ + { + id: 'popular-1', + username: 'popular', + name: 'Popular', + bio: '', + avatar_url: '', + verified: true, + followers: 1000, + following: 100, + }, + ]), + }; + + jest.spyOn(user_repository, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + + const result = await service.getWhoToFollow(user_id, 5); + + // Should fallback to popular users + expect(result.length).toBeGreaterThan(0); + }); + + it('should handle limit of 1', async () => { + const user_id = 'current-user-123'; + + jest.spyOn(user_repository, 'query') + .mockResolvedValueOnce([{ user_id: 'user-1', mutual_count: 1 }]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]); + + const mock_query_builder = { + select: jest.fn().mockReturnThis(), + addSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + setParameter: jest.fn().mockReturnThis(), + getRawMany: jest.fn().mockResolvedValue([ + { + user_id: 'user-1', + user_username: 'user1', + user_name: 'User 1', + user_bio: '', + user_avatar_url: '', + user_verified: false, + user_followers: 10, + user_following: 5, + is_following: false, + is_followed: false, + }, + ]), + }; + + jest.spyOn(user_repository, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + + const result = await service.getWhoToFollow(user_id, 1); + + expect(result).toHaveLength(1); + }); + + it('should handle duplicate users across sources', async () => { + const user_id = 'current-user-123'; + const duplicate_user_id = 'duplicate-user'; + + // Same user appears in multiple sources + jest.spyOn(user_repository, 'query') + .mockResolvedValueOnce([{ user_id: duplicate_user_id, mutual_count: 5 }]) + .mockResolvedValueOnce([ + { user_id: duplicate_user_id, common_categories: 3, avg_interest_score: 80 }, + ]) + .mockResolvedValueOnce([{ user_id: duplicate_user_id, like_count: 10 }]) + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([]); + + const mock_query_builder = { + select: jest.fn().mockReturnThis(), + addSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + setParameter: jest.fn().mockReturnThis(), + getRawMany: jest.fn().mockResolvedValue([ + { + user_id: duplicate_user_id, + user_username: 'dupuser', + user_name: 'Duplicate User', + user_bio: '', + user_avatar_url: '', + user_verified: false, + user_followers: 100, + user_following: 50, + is_following: false, + is_followed: false, + }, + ]), + getMany: jest.fn().mockResolvedValue([]), + }; + + jest.spyOn(user_repository, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + + const result = await service.getWhoToFollow(user_id, 10); + + // Should only include the user once + const user_ids = result.map((u) => u.id); + const unique_user_ids = new Set(user_ids); + expect(user_ids.length).toBe(unique_user_ids.size); + }); + }); + + describe('calculateScore', () => { + it('should calculate score for friends of friends correctly', () => { + const user = { mutual_count: 5 }; + // Access private method through any + const score = (service as any).calculateScore(user, 'fof'); + expect(score).toBe(50); // 5/10 * 100 = 50 + }); + + it('should cap friends of friends score at 100', () => { + const user = { mutual_count: 15 }; + const score = (service as any).calculateScore(user, 'fof'); + expect(score).toBe(100); + }); + + it('should calculate score for interest-based users correctly', () => { + const user = { common_categories: 1, avg_interest_score: 80 }; + const score = (service as any).calculateScore(user, 'interests'); + // (1/2 * 60) + (80/100 * 40) = 30 + 32 = 62 + expect(score).toBe(62); + }); + + it('should cap interest-based score correctly', () => { + const user = { common_categories: 5, avg_interest_score: 100 }; + const score = (service as any).calculateScore(user, 'interests'); + expect(score).toBe(100); // 60 (capped) + 40 (capped) = 100 + }); + + it('should calculate score for liked users correctly', () => { + const user = { like_count: 7 }; + const score = (service as any).calculateScore(user, 'likes'); + expect(score).toBe(70); // 7/10 * 100 = 70 + }); + + it('should cap liked users score at 100', () => { + const user = { like_count: 20 }; + const score = (service as any).calculateScore(user, 'likes'); + expect(score).toBe(100); + }); + + it('should calculate score for replied users correctly', () => { + const user = { reply_count: 3 }; + const score = (service as any).calculateScore(user, 'replies'); + expect(score).toBe(30); // 3/10 * 100 = 30 + }); + + it('should cap replied users score at 100', () => { + const user = { reply_count: 15 }; + const score = (service as any).calculateScore(user, 'replies'); + expect(score).toBe(100); + }); + + it('should return fixed score for followers', () => { + const user = {}; + const score = (service as any).calculateScore(user, 'followers'); + expect(score).toBe(50); + }); + + it('should return 0 for unknown source', () => { + const user = {}; + const score = (service as any).calculateScore(user, 'unknown' as any); + expect(score).toBe(0); + }); + }); + + describe('combineByDistribution', () => { + it('should handle empty arrays from all sources', () => { + const result = (service as any).combineByDistribution([], [], [], [], [], 10); + expect(result).toEqual([]); + }); + + it('should fill remaining slots when distribution yields fewer users', () => { + const fof_users = [{ user_id: 'user-1', mutual_count: 5 }]; + const result = (service as any).combineByDistribution(fof_users, [], [], [], [], 10); + expect(result.length).toBeLessThanOrEqual(10); + expect(result.length).toBeGreaterThan(0); + }); + + it('should deduplicate users across sources', () => { + const duplicate_id = 'duplicate-user'; + const fof_users = [{ user_id: duplicate_id, mutual_count: 5 }]; + const interest_users = [ + { user_id: duplicate_id, common_categories: 2, avg_interest_score: 80 }, + ]; + const result = (service as any).combineByDistribution( + fof_users, + interest_users, + [], + [], + [], + 10 + ); + const user_ids = result.map((u: any) => u.user_id); + const unique_ids = new Set(user_ids); + expect(user_ids.length).toBe(unique_ids.size); + }); + + it('should sort results by score descending', () => { + const fof_users = [ + { user_id: 'user-1', mutual_count: 2 }, + { user_id: 'user-2', mutual_count: 8 }, + ]; + const result = (service as any).combineByDistribution(fof_users, [], [], [], [], 10); + if (result.length > 1) { + expect(result[0].score).toBeGreaterThanOrEqual(result[1].score); + } + }); + + it('should respect the limit parameter', () => { + const fof_users = Array.from({ length: 20 }, (_, i) => ({ + user_id: `user-${i}`, + mutual_count: 5, + })); + const result = (service as any).combineByDistribution(fof_users, [], [], [], [], 5); + expect(result.length).toBeLessThanOrEqual(5); + }); + }); +}); diff --git a/src/explore/who-to-follow.service.ts b/src/explore/who-to-follow.service.ts new file mode 100644 index 00000000..28a40984 --- /dev/null +++ b/src/explore/who-to-follow.service.ts @@ -0,0 +1,541 @@ +import { Injectable } from '@nestjs/common'; +import { UserRepository } from '../user/user.repository'; + +@Injectable() +export class WhoToFollowService { + private readonly CONFIG = { + MAX_MUTUAL_CONNECTIONS_THRESHOLD: 10, + MAX_LIKES_THRESHOLD: 10, + MAX_REPLIES_THRESHOLD: 10, + MAX_COMMON_CATEGORIES_THRESHOLD: 2, + + DISTRIBUTION: { + FRIENDS_OF_FRIENDS: 40, + LIKES: 25, + INTERESTS: 10, + + REPLIES: 15, + FOLLOWERS_NOT_FOLLOWED: 10, + }, + + CANDIDATE_MULTIPLIER: 3, + }; + + constructor(private readonly user_repository: UserRepository) {} + + async getWhoToFollow(current_user_id?: string, limit: number = 30) { + if (!current_user_id) { + return this.getPopularUsers(limit); + } + + const recommendations = await this.getPersonalizedRecommendations(current_user_id, limit); + + if (recommendations.length < limit) { + const needed = limit - recommendations.length; + const existing_ids = new Set(recommendations.map((r) => r.id)); + + const additional_users = await this.getPopularUsers(needed * 2, current_user_id); + const filtered_additional = additional_users + .filter((user) => !existing_ids.has(user.id)) + .slice(0, needed); + + recommendations.push(...filtered_additional); + } + + return recommendations; + } + + private async getPopularUsers(limit: number, current_user_id?: string) { + let query = this.user_repository + .createQueryBuilder('user') + .select([ + 'user.id', + 'user.username', + 'user.name', + 'user.bio', + 'user.avatar_url', + 'user.verified', + 'user.followers', + 'user.following', + ]) + .where('user.deleted_at IS NULL'); + + if (current_user_id) { + query = query.andWhere('user.id != :current_user_id', { current_user_id }).andWhere( + `user.id NOT IN ( + SELECT followed_id FROM user_follows WHERE follower_id = :current_user_id + )`, + { current_user_id } + ); + } + + const users = await query + .orderBy('user.followers', 'DESC') + .addOrderBy('user.verified', 'DESC') + .limit(limit) + .getMany(); + + return users.map((user) => ({ + id: user.id, + username: user.username, + name: user.name, + bio: user.bio || '', + avatar_url: user.avatar_url || '', + verified: user.verified || false, + followers: user.followers || 0, + following: user.following || 0, + is_following: false, + is_followed: false, + })); + } + + private async getPersonalizedRecommendations(current_user_id: string, limit: number) { + const distribution = this.CONFIG.DISTRIBUTION; + const candidate_multiplier = this.CONFIG.CANDIDATE_MULTIPLIER; + + const limits = { + fof: Math.ceil((limit * distribution.FRIENDS_OF_FRIENDS) / 100) * candidate_multiplier, + interests: Math.ceil((limit * distribution.INTERESTS) / 100) * candidate_multiplier, + likes: Math.ceil((limit * distribution.LIKES) / 100) * candidate_multiplier, + replies: Math.ceil((limit * distribution.REPLIES) / 100) * candidate_multiplier, + followers: + Math.ceil((limit * distribution.FOLLOWERS_NOT_FOLLOWED) / 100) * + candidate_multiplier, + }; + + const [ + friends_of_friends, + interest_based, + liked_users, + replied_users, + followers_not_followed, + ] = await Promise.all([ + this.getFriendsOfFriends(current_user_id, limits.fof), + this.getInterestBasedUsers(current_user_id, limits.interests), + this.getLikedUsers(current_user_id, limits.likes), + this.getRepliedUsers(current_user_id, limits.replies), + this.getFollowersNotFollowed(current_user_id, limits.followers), + ]); + + const combined_users_with_metadata = this.combineByDistribution( + friends_of_friends, + interest_based, + liked_users, + replied_users, + followers_not_followed, + limit + ); + + if (combined_users_with_metadata.length === 0) { + return []; + } + + const user_ids = combined_users_with_metadata.map((u) => u.user_id); + + const users = await this.user_repository + .createQueryBuilder('user') + .select([ + 'user.id', + 'user.username', + 'user.name', + 'user.bio', + 'user.avatar_url', + 'user.verified', + 'user.followers', + 'user.following', + ]) + .addSelect( + `EXISTS( + SELECT 1 FROM user_follows uf + WHERE uf.follower_id = :current_user_id AND uf.followed_id = "user"."id" + )`, + 'is_following' + ) + .addSelect( + `EXISTS( + SELECT 1 FROM user_follows uf + WHERE uf.follower_id = "user"."id" AND uf.followed_id = :current_user_id + )`, + 'is_followed' + ) + .where('user.id IN (:...user_ids)', { user_ids }) + .andWhere('user.deleted_at IS NULL') + .setParameter('current_user_id', current_user_id) + .getRawMany(); + + const user_map = new Map(users.map((u) => [u.user_id, u])); + + const users_with_scores = combined_users_with_metadata + .map((metadata) => { + const user = user_map.get(metadata.user_id); + if (!user) return null; + return { + user, + score: metadata.score, + source: metadata.source, + source_data: metadata.source_data, + }; + }) + .filter((u) => u !== null); + + return users_with_scores.map((item) => ({ + id: item.user.user_id, + username: item.user.user_username, + name: item.user.user_name, + bio: item.user.user_bio || '', + avatar_url: item.user.user_avatar_url || '', + verified: item.user.user_verified || false, + followers: item.user.user_followers || 0, + following: item.user.user_following || 0, + is_following: item.user.is_following || false, + is_followed: item.user.is_followed || false, + })); + } + + private calculateScore( + user: any, + source: 'fof' | 'interests' | 'likes' | 'replies' | 'followers' + ): number { + const thresholds = this.CONFIG; + + switch (source) { + case 'fof': { + const normalized = Math.min( + (user.mutual_count / thresholds.MAX_MUTUAL_CONNECTIONS_THRESHOLD) * 100, + 100 + ); + return normalized; + } + + case 'interests': { + const category_score = Math.min( + (user.common_categories / thresholds.MAX_COMMON_CATEGORIES_THRESHOLD) * 60, + 60 + ); + const interest_score = Math.min((user.avg_interest_score / 100) * 40, 40); + return category_score + interest_score; + } + + case 'likes': { + const normalized = Math.min( + (user.like_count / thresholds.MAX_LIKES_THRESHOLD) * 100, + 100 + ); + return normalized; + } + + case 'replies': { + const normalized = Math.min( + (user.reply_count / thresholds.MAX_REPLIES_THRESHOLD) * 100, + 100 + ); + return normalized; + } + + case 'followers': { + return 50; + } + + default: + return 0; + } + } + + private combineByDistribution( + fof_users: Array<{ user_id: string; mutual_count: number }>, + interest_users: Array<{ + user_id: string; + common_categories: number; + avg_interest_score: number; + }>, + liked_users: Array<{ user_id: string; like_count: number }>, + replied_users: Array<{ user_id: string; reply_count: number }>, + followers_users: Array<{ user_id: string }>, + limit: number + ): Array<{ user_id: string; score: number; source: string; source_data: any }> { + const distribution = this.CONFIG.DISTRIBUTION; + const scored_users: Array<{ + user_id: string; + score: number; + source: string; + source_data: any; + }> = [ + ...fof_users.map((u) => ({ + user_id: u.user_id, + score: this.calculateScore(u, 'fof'), + source: 'Friends of Friends', + source_data: u, + })), + ...interest_users.map((u) => ({ + user_id: u.user_id, + score: this.calculateScore(u, 'interests'), + source: 'Interest-Based', + source_data: u, + })), + ...liked_users.map((u) => ({ + user_id: u.user_id, + score: this.calculateScore(u, 'likes'), + source: 'Liked Users', + source_data: u, + })), + ...replied_users.map((u) => ({ + user_id: u.user_id, + score: this.calculateScore(u, 'replies'), + source: 'Replied Users', + source_data: u, + })), + ...followers_users.map((u) => ({ + user_id: u.user_id, + score: this.calculateScore(u, 'followers'), + source: 'Followers Not Followed', + source_data: u, + })), + ]; + + const by_source = { + fof: scored_users.filter((u) => u.source === 'Friends of Friends'), + interests: scored_users.filter((u) => u.source === 'Interest-Based'), + likes: scored_users.filter((u) => u.source === 'Liked Users'), + replies: scored_users.filter((u) => u.source === 'Replied Users'), + followers: scored_users.filter((u) => u.source === 'Followers Not Followed'), + }; + + const counts = { + fof: Math.ceil((limit * distribution.FRIENDS_OF_FRIENDS) / 100), + interests: Math.ceil((limit * distribution.INTERESTS) / 100), + likes: Math.ceil((limit * distribution.LIKES) / 100), + replies: Math.ceil((limit * distribution.REPLIES) / 100), + followers: Math.ceil((limit * distribution.FOLLOWERS_NOT_FOLLOWED) / 100), + }; + + const result: Array<{ user_id: string; score: number; source: string; source_data: any }> = + []; + const seen = new Set(); + + const add_from_source = (users: any[], count: number) => { + let added = 0; + for (const user of users) { + if (added >= count) break; + if (!seen.has(user.user_id)) { + result.push(user); + seen.add(user.user_id); + added++; + } + } + return added; + }; + + const actual_counts = { + fof: add_from_source(by_source.fof, counts.fof), + interests: add_from_source(by_source.interests, counts.interests), + likes: add_from_source(by_source.likes, counts.likes), + replies: add_from_source(by_source.replies, counts.replies), + followers: add_from_source(by_source.followers, counts.followers), + }; + + console.log( + `\nActual distribution: FoF=${actual_counts.fof}, Interests=${actual_counts.interests}, Likes=${actual_counts.likes}, Replies=${actual_counts.replies}, Followers=${actual_counts.followers}` + ); + + if (result.length < limit) { + const all_remaining = scored_users + .filter((u) => !seen.has(u.user_id)) + .sort((a, b) => b.score - a.score); + + for (const user of all_remaining) { + if (result.length >= limit) break; + result.push({ + ...user, + source: `${user.source} (extra)`, + }); + seen.add(user.user_id); + } + } + + result.sort((a, b) => b.score - a.score); + + return result.slice(0, limit); + } + + private async getFriendsOfFriends(current_user_id: string, limit: number) { + const result = await this.user_repository.query( + ` + WITH user_following AS ( + SELECT followed_id + FROM user_follows + WHERE follower_id = $1 + ), + user_blocks AS ( + SELECT blocked_id FROM user_blocks WHERE blocker_id = $1 + UNION + SELECT blocker_id FROM user_blocks WHERE blocked_id = $1 + ) + SELECT + uf2.followed_id as user_id, + COUNT(DISTINCT uf2.follower_id) as mutual_count + FROM user_follows uf2 + WHERE uf2.follower_id IN (SELECT followed_id FROM user_following) + AND uf2.followed_id != $1 + AND uf2.followed_id NOT IN (SELECT followed_id FROM user_following) + AND uf2.followed_id NOT IN (SELECT blocked_id FROM user_blocks) + GROUP BY uf2.followed_id + ORDER BY mutual_count DESC + LIMIT $2 + `, + [current_user_id, limit] + ); + + return result.map((r) => ({ + user_id: r.user_id, + mutual_count: Number.parseInt(r.mutual_count), + })); + } + + private async getInterestBasedUsers(current_user_id: string, limit: number) { + const result = await this.user_repository.query( + ` + WITH user_categories AS ( + SELECT category_id, score + FROM user_interests + WHERE user_id = $1 + ), + user_following AS ( + SELECT followed_id + FROM user_follows + WHERE follower_id = $1 + ), + user_blocks AS ( + SELECT blocked_id FROM user_blocks WHERE blocker_id = $1 + UNION + SELECT blocker_id FROM user_blocks WHERE blocked_id = $1 + ) + SELECT + ui.user_id, + COUNT(DISTINCT ui.category_id) as common_categories, + AVG(ui.score) as avg_interest_score + FROM user_interests ui + INNER JOIN user_categories uc ON ui.category_id = uc.category_id + WHERE ui.user_id != $1 + AND ui.user_id NOT IN (SELECT followed_id FROM user_following) + AND ui.user_id NOT IN (SELECT blocked_id FROM user_blocks) + GROUP BY ui.user_id + HAVING COUNT(DISTINCT ui.category_id) >= 1 + ORDER BY common_categories DESC, avg_interest_score DESC + LIMIT $2 + `, + [current_user_id, limit] + ); + + return result.map((r) => ({ + user_id: r.user_id, + common_categories: Number.parseInt(r.common_categories), + avg_interest_score: Number.parseFloat(r.avg_interest_score), + })); + } + + private async getLikedUsers(current_user_id: string, limit: number) { + const result = await this.user_repository.query( + ` + WITH user_following AS ( + SELECT followed_id + FROM user_follows + WHERE follower_id = $1 + ), + user_blocks AS ( + SELECT blocked_id FROM user_blocks WHERE blocker_id = $1 + UNION + SELECT blocker_id FROM user_blocks WHERE blocked_id = $1 + ) + SELECT + t.user_id, + COUNT(DISTINCT tl.tweet_id) as like_count, + MAX(tl.created_at) as last_interaction + FROM tweet_likes tl + INNER JOIN tweets t ON tl.tweet_id = t.tweet_id + WHERE tl.user_id = $1 + AND t.user_id != $1 + AND t.user_id NOT IN (SELECT followed_id FROM user_following) + AND t.user_id NOT IN (SELECT blocked_id FROM user_blocks) + GROUP BY t.user_id + ORDER BY like_count DESC, last_interaction DESC + LIMIT $2 + `, + [current_user_id, limit] + ); + + return result.map((r) => ({ + user_id: r.user_id, + like_count: Number.parseInt(r.like_count), + })); + } + + private async getRepliedUsers(current_user_id: string, limit: number) { + const result = await this.user_repository.query( + ` + WITH user_following AS ( + SELECT followed_id + FROM user_follows + WHERE follower_id = $1 + ), + user_blocks AS ( + SELECT blocked_id FROM user_blocks WHERE blocker_id = $1 + UNION + SELECT blocker_id FROM user_blocks WHERE blocked_id = $1 + ) + SELECT + parent.user_id, + COUNT(DISTINCT reply.tweet_id) as reply_count, + MAX(reply.created_at) as last_interaction + FROM tweets reply + INNER JOIN tweet_replies tr ON reply.tweet_id = tr.reply_tweet_id + INNER JOIN tweets parent ON tr.original_tweet_id = parent.tweet_id + WHERE reply.user_id = $1 + AND parent.user_id != $1 + AND parent.user_id NOT IN (SELECT followed_id FROM user_following) + AND parent.user_id NOT IN (SELECT blocked_id FROM user_blocks) + GROUP BY parent.user_id + ORDER BY reply_count DESC, last_interaction DESC + LIMIT $2 + `, + [current_user_id, limit] + ); + + return result.map((r) => ({ + user_id: r.user_id, + reply_count: Number.parseInt(r.reply_count), + })); + } + + private async getFollowersNotFollowed(current_user_id: string, limit: number) { + const result = await this.user_repository.query( + ` + WITH user_following AS ( + SELECT followed_id + FROM user_follows + WHERE follower_id = $1 + ), + user_blocks AS ( + SELECT blocked_id FROM user_blocks WHERE blocker_id = $1 + UNION + SELECT blocker_id FROM user_blocks WHERE blocked_id = $1 + ) + SELECT + uf.follower_id as user_id, + u.followers, + u.verified + FROM user_follows uf + INNER JOIN "user" u ON u.id = uf.follower_id + WHERE uf.followed_id = $1 + AND uf.follower_id NOT IN (SELECT followed_id FROM user_following) + AND uf.follower_id NOT IN (SELECT blocked_id FROM user_blocks) + ORDER BY u.verified DESC, u.followers DESC + LIMIT $2 + `, + [current_user_id, limit] + ); + + return result.map((r) => ({ + user_id: r.user_id, + })); + } +} diff --git a/src/fcm/fcm.controller.spec.ts b/src/expo/expo.controller.spec.ts similarity index 89% rename from src/fcm/fcm.controller.spec.ts rename to src/expo/expo.controller.spec.ts index 4263e2bd..922de092 100644 --- a/src/fcm/fcm.controller.spec.ts +++ b/src/expo/expo.controller.spec.ts @@ -1,6 +1,6 @@ import { Test, TestingModule } from '@nestjs/testing'; -import { FcmController } from './fcm.controller'; -import { FCMService } from './fcm.service'; +import { FcmController } from './expo.controller'; +import { FCMService } from './expo.service'; describe('FcmController', () => { let controller: FcmController; diff --git a/src/fcm/fcm.controller.ts b/src/expo/expo.controller.ts similarity index 82% rename from src/fcm/fcm.controller.ts rename to src/expo/expo.controller.ts index d2c27894..14f013dd 100644 --- a/src/fcm/fcm.controller.ts +++ b/src/expo/expo.controller.ts @@ -8,45 +8,47 @@ import { Req, UseGuards, } from '@nestjs/common'; -import { FCMService } from './fcm.service'; +import { FCMService } from './expo.service'; import { ApiBadRequestResponse, ApiBearerAuth, ApiBody, ApiOkResponse, ApiOperation, - ApiResponse, ApiTags, ApiUnauthorizedResponse, } from '@nestjs/swagger'; -import { AuthGuard } from '@nestjs/passport'; -import { register_device_token_swagger, remove_device_token_swagger } from './fcm.swagger'; +import { register_device_token_swagger, remove_device_token_swagger } from './expo.swagger'; +import { JwtAuthGuard } from 'src/auth/guards/jwt.guard'; -@ApiTags('FCM - Push Notifications') -@ApiBearerAuth() -@UseGuards(AuthGuard('jwt')) +@ApiTags('Expo - Push Notifications') +@ApiBearerAuth('JWT-auth') @Controller('fcm') export class FcmController { constructor(private readonly fcm_service: FCMService) {} - @Post('token') @HttpCode(HttpStatus.OK) @ApiOperation(register_device_token_swagger.operation) @ApiBody(register_device_token_swagger.body) @ApiOkResponse(register_device_token_swagger.responses.success) @ApiBadRequestResponse(register_device_token_swagger.responses.badRequest) @ApiUnauthorizedResponse(register_device_token_swagger.responses.unauthorized) + @ApiBearerAuth('JWT-auth') + @UseGuards(JwtAuthGuard) + @Post('token') async registerDeviceToken(@Req() req: any, @Body('token') token: string) { const user_id = req.user.id; await this.fcm_service.addUserDeviceToken(user_id, token); return { success: true }; } - @Delete('token') @HttpCode(HttpStatus.OK) @ApiOperation(remove_device_token_swagger.operation) @ApiOkResponse(remove_device_token_swagger.responses.success) @ApiUnauthorizedResponse(remove_device_token_swagger.responses.unauthorized) + @ApiBearerAuth('JWT-auth') + @UseGuards(JwtAuthGuard) + @Delete('token') async removeDeviceToken(@Req() req: any) { const user_id = req.user.id; await this.fcm_service.removeUserDeviceToken(user_id); diff --git a/src/fcm/fcm.module.ts b/src/expo/expo.module.ts similarity index 76% rename from src/fcm/fcm.module.ts rename to src/expo/expo.module.ts index a777e1b4..4c505edc 100644 --- a/src/fcm/fcm.module.ts +++ b/src/expo/expo.module.ts @@ -1,6 +1,6 @@ import { Module } from '@nestjs/common'; -import { FCMService } from './fcm.service'; -import { FcmController } from './fcm.controller'; +import { FCMService } from './expo.service'; +import { FcmController } from './expo.controller'; import { TypeOrmModule } from '@nestjs/typeorm'; import { User } from 'src/user/entities'; diff --git a/src/expo/expo.service.spec.ts b/src/expo/expo.service.spec.ts new file mode 100644 index 00000000..9a9118b6 --- /dev/null +++ b/src/expo/expo.service.spec.ts @@ -0,0 +1,718 @@ +import { Test, TestingModule } from '@nestjs/testing'; +import { getRepositoryToken } from '@nestjs/typeorm'; +import { Repository } from 'typeorm'; +import { FCMService } from './expo.service'; +import { User } from 'src/user/entities'; +import { NotificationType } from 'src/notifications/enums/notification-types'; +import { Expo } from 'expo-server-sdk'; + +// Mock expo-server-sdk +jest.mock('expo-server-sdk'); + +describe('FCMService', () => { + let service: FCMService; + let mock_user_repository: any; + let mock_expo_instance: any; + + const mock_user = { + id: 'user-123', + fcm_token: 'ExponentPushToken[mock-token-123]', + username: 'testuser', + }; + + beforeEach(async () => { + // Mock Expo instance methods + mock_expo_instance = { + sendPushNotificationsAsync: jest.fn().mockResolvedValue([ + { + status: 'ok', + id: 'mock-receipt-id', + }, + ]), + chunkPushNotifications: jest.fn((messages) => [messages]), + chunkPushNotificationReceiptIds: jest.fn((ids) => [ids]), + getPushNotificationReceiptsAsync: jest.fn().mockResolvedValue({}), + }; + + // Mock Expo constructor and static method + (Expo as unknown as jest.Mock).mockImplementation(() => mock_expo_instance); + (Expo.isExpoPushToken as unknown as jest.Mock) = jest.fn().mockReturnValue(true); + + const mock_query_builder = { + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + getOne: jest.fn().mockResolvedValue(mock_user), + }; + + mock_user_repository = { + findOne: jest.fn().mockResolvedValue(mock_user), + update: jest.fn().mockResolvedValue({ affected: 1 }), + createQueryBuilder: jest.fn().mockReturnValue(mock_query_builder), + }; + + const module: TestingModule = await Test.createTestingModule({ + providers: [ + FCMService, + { + provide: getRepositoryToken(User), + useValue: mock_user_repository, + }, + ], + }).compile(); + + service = module.get(FCMService); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should be defined', () => { + expect(service).toBeDefined(); + }); + + describe('Constructor', () => { + it('should initialize Expo SDK client', () => { + expect(Expo).toHaveBeenCalledWith({ + useFcmV1: true, + }); + }); + }); + + describe('sendToDevice', () => { + it('should send message to device successfully', async () => { + const device_token = 'ExponentPushToken[valid-token]'; + const data = { key: 'value', type: 'LIKE' }; + const notification = { title: 'Test Title', body: 'Test Body' }; + + const result = await service.sendToDevice(device_token, data, notification); + + expect(Expo.isExpoPushToken).toHaveBeenCalledWith(device_token); + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith([ + { + to: device_token, + sound: 'default', + title: notification.title, + body: notification.body, + data: data, + }, + ]); + expect(result).toEqual({ status: 'ok', id: 'mock-receipt-id' }); + }); + + it('should send message without notification object', async () => { + const device_token = 'ExponentPushToken[valid-token]'; + const data = { key: 'value' }; + + const result = await service.sendToDevice(device_token, data); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith([ + { + to: device_token, + sound: 'default', + title: undefined, + body: undefined, + data: data, + }, + ]); + expect(result).toEqual({ status: 'ok', id: 'mock-receipt-id' }); + }); + + it('should throw error for invalid push token', async () => { + const invalid_token = 'invalid-token'; + (Expo.isExpoPushToken as unknown as jest.Mock).mockReturnValueOnce(false); + + const logger_spy = jest.spyOn(service['logger'], 'error'); + + await expect(service.sendToDevice(invalid_token, { key: 'value' })).rejects.toThrow( + 'Invalid Expo push token' + ); + + expect(logger_spy).toHaveBeenCalledWith( + `Push token ${invalid_token} is not a valid Expo push token` + ); + }); + + it('should throw error when ticket status is error', async () => { + mock_expo_instance.sendPushNotificationsAsync.mockResolvedValueOnce([ + { + status: 'error', + message: 'Device not registered', + details: { error: 'DeviceNotRegistered' }, + }, + ]); + + const logger_spy = jest.spyOn(service['logger'], 'error'); + + await expect( + service.sendToDevice('ExponentPushToken[valid]', { key: 'value' }) + ).rejects.toThrow('Device not registered'); + + expect(logger_spy).toHaveBeenCalledWith( + 'Error sending push notification: Device not registered' + ); + }); + + it('should log successful send', async () => { + const logger_spy = jest.spyOn(service['logger'], 'log'); + + await service.sendToDevice('ExponentPushToken[valid]', { key: 'value' }); + + expect(logger_spy).toHaveBeenCalledWith( + expect.stringContaining('Expo push notification sent:') + ); + }); + }); + + describe('addUserDeviceToken', () => { + it('should save FCM token for user successfully', async () => { + const user_id = 'user-123'; + const device_token = 'new-device-token'; + + await service.addUserDeviceToken(user_id, device_token); + + expect(mock_user_repository.update).toHaveBeenCalledWith(user_id, { + fcm_token: device_token, + }); + }); + + it('should log error and throw when saving token fails', async () => { + const error = new Error('Database error'); + mock_user_repository.update.mockRejectedValue(error); + + const logger_spy = jest.spyOn(service['logger'], 'error'); + + await expect(service.addUserDeviceToken('user-123', 'token')).rejects.toThrow( + 'Database error' + ); + + expect(logger_spy).toHaveBeenCalledWith( + 'Error saving FCM token for user user-123: Database error' + ); + }); + }); + + describe('removeUserDeviceToken', () => { + it('should remove FCM token for user successfully', async () => { + const user_id = 'user-123'; + + await service.removeUserDeviceToken(user_id); + + expect(mock_user_repository.update).toHaveBeenCalledWith(user_id, { + fcm_token: null, + }); + }); + + it('should log error and throw when removing token fails', async () => { + const error = new Error('Database error'); + mock_user_repository.update.mockRejectedValue(error); + + const logger_spy = jest.spyOn(service['logger'], 'error'); + + await expect(service.removeUserDeviceToken('user-123')).rejects.toThrow( + 'Database error' + ); + + expect(logger_spy).toHaveBeenCalledWith( + 'Error removing FCM token for user user-123: Database error' + ); + }); + }); + + describe('sendNotificationToUserDevice', () => { + it('should send LIKE notification successfully', async () => { + const payload = { + likers: [{ name: 'John Doe' }], + tweets: [{ content: 'Tweet content', id: 'tweet-123' }], + tweet_id: 'tweet-123', + }; + + const result = await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.LIKE, + payload + ); + + expect(mock_user_repository.createQueryBuilder).toHaveBeenCalledWith('user'); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith([ + { + to: 'ExponentPushToken[mock-token-123]', + sound: 'default', + title: 'Liked by John Doe', + body: 'Tweet content', + data: { + tweet_id: 'tweet-123', + type: 'tweet', + user_id: undefined, + }, + }, + ]); + + expect(result).toBe(true); + }); + + it('should send REPLY notification successfully', async () => { + const payload = { + replier: { name: 'Jane Smith' }, + reply_tweet: { content: 'Reply content', id: 'tweet-456' }, + tweet_id: 'tweet-456', + }; + + const result = await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.REPLY, + payload + ); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Jane Smith replied:', + body: 'Reply content', + }), + ]) + ); + + expect(result).toBe(true); + }); + + it('should send REPOST notification successfully', async () => { + const payload = { + reposter: { name: 'Bob Johnson' }, + tweet: { content: 'Tweet content', id: 'tweet-789' }, + }; + + await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.REPOST, + payload + ); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Reposted by Bob Johnson:', + body: 'Tweet content', + }), + ]) + ); + }); + + it('should send QUOTE notification successfully', async () => { + const payload = { + quoted_by: { username: 'alice' }, + quote: { content: 'Quote content', id: 'tweet-101' }, + }; + + await service.sendNotificationToUserDevice('user-123', NotificationType.QUOTE, payload); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Yapper', + body: '@alice quoted your post and said: Quote content', + }), + ]) + ); + }); + + it('should send MENTION notification successfully', async () => { + const payload = { + mentioned_by: { name: 'Charlie Wilson' }, + tweet: { content: 'Tweet content', id: 'tweet-202' }, + }; + + await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.MENTION, + payload + ); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Mentioned by Charlie Wilson:', + body: 'Tweet content', + }), + ]) + ); + }); + + it('should send MESSAGE notification successfully', async () => { + const payload = { + sender: { name: 'David Lee' }, + content: 'Hello!', + chat_id: 'chat-123', + }; + + await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.MESSAGE, + payload + ); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'David Lee', + body: 'Hello!', + }), + ]) + ); + }); + + it('should send FOLLOW notification with follower_name', async () => { + const payload = { + follower_username: 'emma', + follower_id: 'user-303', + }; + + await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.FOLLOW, + payload + ); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Yapper', + body: '@emma followed you!', + }), + ]) + ); + }); + + it('should send notification with fallback content when payload is missing fields', async () => { + const payload = { + // Missing required fields + tweet_id: 'tweet-123', + }; + + const result = await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.LIKE, + payload + ); + + expect(result).toBe(true); + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Liked by Someone', + body: 'your post', + }), + ]) + ); + }); + + it('should return false and warn if user has no FCM token', async () => { + const mock_query_builder = { + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + getOne: jest.fn().mockResolvedValue({ id: 'user-123', fcm_token: null }), + }; + mock_user_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + + const logger_spy = jest.spyOn(service['logger'], 'warn'); + + const result = await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.LIKE, + {} + ); + + expect(logger_spy).toHaveBeenCalledWith('No FCM token found for user user-123'); + expect(mock_expo_instance.sendPushNotificationsAsync).not.toHaveBeenCalled(); + expect(result).toBe(false); + }); + + it('should return false and warn if user not found', async () => { + const mock_query_builder = { + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + getOne: jest.fn().mockResolvedValue(null), + }; + mock_user_repository.createQueryBuilder.mockReturnValue(mock_query_builder); + + const logger_spy = jest.spyOn(service['logger'], 'warn'); + + const result = await service.sendNotificationToUserDevice( + 'user-999', + NotificationType.LIKE, + {} + ); + + expect(logger_spy).toHaveBeenCalledWith('No FCM token found for user user-999'); + expect(result).toBe(false); + }); + + it('should log success when notification sent', async () => { + const logger_spy = jest.spyOn(service['logger'], 'log'); + + await service.sendNotificationToUserDevice('user-123', NotificationType.LIKE, { + likers: [{ name: 'Test' }], + tweets: [{ content: 'Content', id: 'tweet-1' }], + }); + + expect(logger_spy).toHaveBeenCalledWith('Notification sent via FCM to user user-123'); + }); + + it('should return false and log error if sending fails', async () => { + mock_expo_instance.sendPushNotificationsAsync.mockRejectedValue( + new Error('Send failed') + ); + + const logger_spy = jest.spyOn(service['logger'], 'error'); + + const result = await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.LIKE, + { + likers: [{ name: 'Test' }], + tweets: [{ content: 'Content', id: 'tweet-1' }], + } + ); + + expect(logger_spy).toHaveBeenCalledWith( + 'Error sending FCM notification to user user-123: Send failed' + ); + expect(result).toBe(false); + }); + + it('should handle payload with nested user object structure', async () => { + const payload = { + likers: [ + { + name: 'Complex User', + id: 'user-789', + username: 'complexuser', + }, + ], + tweets: [{ content: 'Tweet content', id: 'tweet-123' }], + tweet_id: 'tweet-123', + }; + + await service.sendNotificationToUserDevice('user-123', NotificationType.LIKE, payload); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + data: { + tweet_id: 'tweet-123', + type: 'tweet', + user_id: undefined, + }, + }), + ]) + ); + }); + + it('should handle LIKE notification with multiple likers', async () => { + // Implementation uses first liker from likers array + const payload = { + likers: [{ name: 'User1' }, { name: 'User2' }, { name: 'User3' }], + tweets: [{ content: 'Tweet content', id: 'tweet-123' }], + tweet_id: 'tweet-123', + }; + + await service.sendNotificationToUserDevice('user-123', NotificationType.LIKE, payload); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Liked by User1', + body: 'Tweet content', + }), + ]) + ); + }); + + it('should handle REPOST notification with reposter object', async () => { + // Implementation uses reposter.name, not reposters array + const payload = { + reposter: { name: 'User1', id: 'reposter-id' }, + tweet: { content: 'Tweet content', id: 'tweet-123' }, + }; + + await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.REPOST, + payload + ); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Reposted by User1:', + body: 'Tweet content', + }), + ]) + ); + }); + + it('should handle FOLLOW notification with follower fields', async () => { + // Implementation uses follower_username and follower_id, not follower object + const payload = { + follower_username: 'newuser', + follower_name: 'New User', + follower_id: 'user-new', + }; + + await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.FOLLOW, + payload + ); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + body: '@newuser followed you!', + data: expect.objectContaining({ + type: 'user', + user_id: 'user-new', + }), + }), + ]) + ); + }); + + it('should handle QUOTE notification with quoted_by object', async () => { + // Implementation uses quoted_by.username and quote.content + const payload = { + quoted_by: { + username: 'quoter', + name: 'Quoter Name', + id: 'quoter-id', + }, + quote: { content: 'Quote text', id: 'tweet-quote' }, + }; + + await service.sendNotificationToUserDevice('user-123', NotificationType.QUOTE, payload); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + body: '@quoter quoted your post and said: Quote text', + }), + ]) + ); + }); + + it('should handle MENTION notification with mentioned_by object', async () => { + // Implementation uses mentioned_by.name + const payload = { + mentioned_by: { + name: 'Mentioner', + id: 'mentioner-id', + }, + tweet: { content: 'Mention tweet', id: 'tweet-mention' }, + }; + + await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.MENTION, + payload + ); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Mentioned by Mentioner:', + body: 'Mention tweet', + }), + ]) + ); + }); + + it('should handle REPLY notification with replier object', async () => { + const payload = { + replier: { + name: 'Replier Name', + }, + reply_tweet: { content: 'Reply text', id: 'tweet-reply' }, + }; + + await service.sendNotificationToUserDevice('user-123', NotificationType.REPLY, payload); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Replier Name replied:', + body: 'Reply text', + }), + ]) + ); + }); + + it('should handle MESSAGE notification with sender object', async () => { + const payload = { + sender: { + name: 'Sender Name', + id: 'sender-id', + }, + content: 'Message content', + chat_id: 'chat-123', + }; + + await service.sendNotificationToUserDevice( + 'user-123', + NotificationType.MESSAGE, + payload + ); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Sender Name', + body: 'Message content', + data: expect.objectContaining({ + type: 'chat', + chat_id: 'chat-123', + }), + }), + ]) + ); + }); + + it('should handle long tweet content in notification body', async () => { + // Implementation passes content as-is without truncation + const long_content = 'A'.repeat(200); + const payload = { + liker: { name: 'User', id: 'liker-id' }, + tweet: { content: long_content, id: 'tweet-123' }, + }; + + await service.sendNotificationToUserDevice('user-123', NotificationType.LIKE, payload); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + body: long_content, + title: 'Liked by User', + }), + ]) + ); + }); + + it('should handle empty arrays in aggregated notifications', async () => { + const payload = { + likers: [], + tweets: [], + }; + + await service.sendNotificationToUserDevice('user-123', NotificationType.LIKE, payload); + + expect(mock_expo_instance.sendPushNotificationsAsync).toHaveBeenCalledWith( + expect.arrayContaining([ + expect.objectContaining({ + title: 'Liked by Someone', + body: 'your post', + }), + ]) + ); + }); + }); +}); diff --git a/src/expo/expo.service.ts b/src/expo/expo.service.ts new file mode 100644 index 00000000..3f801c86 --- /dev/null +++ b/src/expo/expo.service.ts @@ -0,0 +1,325 @@ +import { Injectable, Logger } from '@nestjs/common'; +import { InjectRepository } from '@nestjs/typeorm'; +import { Expo, ExpoPushErrorReceipt, ExpoPushMessage, ExpoPushTicket } from 'expo-server-sdk'; +import { NotificationType } from 'src/notifications/enums/notification-types'; +import { User } from 'src/user/entities'; +import { Repository } from 'typeorm'; + +@Injectable() +export class FCMService { + private readonly logger = new Logger(FCMService.name); + private readonly expo: Expo; + + constructor(@InjectRepository(User) private readonly user_repository: Repository) { + this.expo = new Expo({ + useFcmV1: true, + }); + } + + async sendToDevice( + device_token: string, + data: any, + notification?: { title: string; body: string } + ): Promise { + try { + if (!Expo.isExpoPushToken(device_token)) { + this.logger.error( + `Push token ${String(device_token)} is not a valid Expo push token` + ); + throw new Error('Invalid Expo push token'); + } + + const message: ExpoPushMessage = { + to: device_token, + sound: 'default', + title: notification?.title, + body: notification?.body, + data: data, + }; + + const ticket_chunk = await this.expo.sendPushNotificationsAsync([message]); + const ticket = ticket_chunk[0]; + + this.logger.log(`Expo push notification sent: ${JSON.stringify(ticket)}`); + + if (ticket.status === 'error') { + const error_ticket = ticket; + const error_message = String(error_ticket.message || 'Unknown error'); + this.logger.error(`Error sending push notification: ${error_message}`); + if (error_ticket.details?.error) { + this.logger.error(`Error code: ${String(error_ticket.details.error)}`); + } + throw new Error(error_message); + } + + return ticket; + } catch (err) { + this.logger.error(`Expo push notification error: ${err.message}`); + throw err; + } + } + + async addUserDeviceToken(user_id: string, device_token: string) { + try { + await this.user_repository.update(user_id, { fcm_token: device_token }); + } catch (error) { + this.logger.error(`Error saving FCM token for user ${user_id}: ${error.message}`); + throw error; + } + } + + async removeUserDeviceToken(user_id: string) { + try { + await this.user_repository.update(user_id, { fcm_token: null }); + } catch (error) { + this.logger.error(`Error removing FCM token for user ${user_id}: ${error.message}`); + throw error; + } + } + + async sendNotificationToUserDevice( + user_id: string, + notification_type: NotificationType, + payload: any + ): Promise { + try { + const user = await this.user_repository + .createQueryBuilder('user') + .where('user.id = :id', { id: user_id }) + .select(['user.fcm_token']) + .getOne(); + + if (!user?.fcm_token) { + this.logger.warn(`No FCM token found for user ${user_id}`); + return false; + } + + const notification_content = this.getNotificationContent(notification_type, payload); + + const notification = { + title: notification_content.title, + body: notification_content.body, + }; + + await this.sendToDevice(user.fcm_token, notification_content.data, notification); + this.logger.log(`Notification sent via FCM to user ${user_id}`); + return true; + } catch (error) { + this.logger.error( + `Error sending FCM notification to user ${user_id}: ${error.message}` + ); + return false; + } + } + + private getNotificationContent( + type: NotificationType, + payload: any + ): { title: string; body: string; data: any } { + switch (type) { + case NotificationType.FOLLOW: + return { + title: 'Yapper', + body: `@${payload.follower_username || 'Someone'} followed you!`, + data: { user_id: payload.follower_id, type: 'user' }, + }; + case NotificationType.MENTION: { + let content = payload.tweet?.content; + const mentions = payload.tweet?.mentions; + if (content && mentions) + mentions.forEach((mention, index) => { + content = content.replace(`\u200B$(${index})\u200C`, `@${mention}`); + }); + return { + title: `Mentioned by ${payload.mentioned_by?.name || 'Someone'}:`, + body: content || 'You were mentioned in a post', + data: { + tweet_id: payload.tweet?.id || payload.tweet?.tweet_id, + user_id: payload.mentioned_by?.id, + type: 'tweet', + }, + }; + } + case NotificationType.REPLY: + return { + title: `${payload.replier?.name || 'Someone'} replied:`, + body: payload.reply_tweet?.content || 'replied to your post', + data: { + tweet_id: payload.reply_tweet?.id || payload.reply_tweet?.tweet_id, + user_id: payload.replier?.id, + type: 'tweet', + }, + }; + case NotificationType.QUOTE: + return { + title: 'Yapper', + //eslint-disable-next-line + body: `@${payload.quoted_by?.username || 'Someone'} quoted your post${ + payload.quote?.content ? ` and said: ${payload.quote.content}` : '' + }`, + data: { + tweet_id: payload.quote?.id || payload.quote?.tweet_id, + user_id: payload.quoted_by?.id, + type: 'tweet', + }, + }; + case NotificationType.LIKE: { + const liker_name = payload.liker?.name || payload.likers?.[0]?.name || 'Someone'; + const liked_tweet_content = + payload.tweet?.content || payload.tweets?.[0]?.content || 'your post'; + const liked_tweet_id = + payload.tweet?.tweet_id || payload.tweet?.id || payload.tweets?.[0]?.id; + return { + title: `Liked by ${liker_name}`, + body: liked_tweet_content, + data: { tweet_id: liked_tweet_id, user_id: payload.liker?.id, type: 'tweet' }, + }; + } + case NotificationType.REPOST: { + const reposter_name = payload.reposter?.name || 'Someone'; + const reposted_tweet_content = payload.tweet?.content || 'your post'; + const reposted_tweet_id = + payload.tweet?.tweet_id || payload.tweet?.id || payload.tweets?.[0]?.id; + return { + title: `Reposted by ${reposter_name}:`, + body: reposted_tweet_content, + data: { + tweet_id: reposted_tweet_id, + user_id: payload.reposter?.id, + type: 'tweet', + }, + }; + } + case NotificationType.MESSAGE: + return { + title: payload.sender?.name || 'New Message', + body: payload.content || 'You have a new message', + data: { chat_id: payload.chat_id, type: 'chat' }, + }; + default: + return { + title: 'Yapper', + body: 'You have a new notification', + data: {}, + }; + } + } + + /** + * Send push notifications to multiple devices in batches + * @param messages Array of Expo push messages + * @returns Array of push tickets + */ + async sendBatchNotifications(messages: ExpoPushMessage[]): Promise { + try { + // Filter invalid tokens + const valid_messages = messages.filter((message) => { + if (!Expo.isExpoPushToken(message.to as string)) { + const token = Array.isArray(message.to) ? message.to.join(', ') : message.to; + this.logger.error(`Invalid Expo push token: ${token}`); + return false; + } + return true; + }); + + if (valid_messages.length === 0) { + this.logger.warn('No valid push tokens to send notifications to'); + return []; + } + + // Chunk the notifications + const chunks = this.expo.chunkPushNotifications(valid_messages); + const tickets: ExpoPushTicket[] = []; + + // Send each chunk + for (const chunk of chunks) { + try { + const ticket_chunk = await this.expo.sendPushNotificationsAsync(chunk); + tickets.push(...ticket_chunk); + + ticket_chunk.forEach((ticket, index) => { + if (ticket.status === 'error') { + const token = Array.isArray(chunk[index].to) + ? chunk[index].to.join(', ') + : chunk[index].to; + this.logger.error( + `Error sending notification to ${token}: ${ticket.message}` + ); + if (ticket.details?.error) { + this.logger.error(`Error code: ${ticket.details.error}`); + } + } + }); + } catch (error) { + this.logger.error(`Error sending push notification chunk: ${error.message}`); + } + } + + return tickets; + } catch (error) { + this.logger.error(`Error in batch notification send: ${error.message}`); + throw error; + } + } + + /** + * Check the receipts for sent push notifications + * @param receipt_ids Array of receipt IDs from push tickets + */ + async checkPushNotificationReceipts(receipt_ids: string[]): Promise { + try { + // Chunk the receipt IDs + const receipt_id_chunks = this.expo.chunkPushNotificationReceiptIds(receipt_ids); + + for (const chunk of receipt_id_chunks) { + try { + const receipts = await this.expo.getPushNotificationReceiptsAsync(chunk); + + // Check errors for each receipt + for (const receipt_id in receipts) { + const receipt = receipts[receipt_id]; + + if (receipt.status === 'ok') { + continue; + } + + if (receipt.status === 'error') { + const error_receipt = receipt; + this.logger.error( + `Error in push notification receipt ${receipt_id}: ${error_receipt.message}` + ); + + if (error_receipt.details?.error) { + this.logger.error(`Error code: ${error_receipt.details.error}`); + + if (error_receipt.details.error === 'DeviceNotRegistered') { + this.logger.warn( + `Device token is no longer valid: ${receipt_id}` + ); + await this.user_repository.update(receipt_id, { + fcm_token: null, + }); + } else if (error_receipt.details.error === 'MessageTooBig') { + this.logger.warn( + `Notification payload too large for receipt: ${receipt_id}` + ); + } else if (error_receipt.details.error === 'MessageRateExceeded') { + this.logger.warn( + `Rate limit exceeded for receipt: ${receipt_id}` + ); + } + } + } + } + } catch (error) { + this.logger.error( + `Error fetching push notification receipts: ${error.message}` + ); + } + } + } catch (error) { + this.logger.error(`Error checking push notification receipts: ${error.message}`); + throw error; + } + } +} diff --git a/src/fcm/fcm.swagger.ts b/src/expo/expo.swagger.ts similarity index 100% rename from src/fcm/fcm.swagger.ts rename to src/expo/expo.swagger.ts diff --git a/src/fcm/fcm.service.spec.ts b/src/fcm/fcm.service.spec.ts deleted file mode 100644 index 8000a3bb..00000000 --- a/src/fcm/fcm.service.spec.ts +++ /dev/null @@ -1,550 +0,0 @@ -import { Test, TestingModule } from '@nestjs/testing'; -import { getRepositoryToken } from '@nestjs/typeorm'; -import { Repository } from 'typeorm'; -import { FCMService } from './fcm.service'; -import { User } from 'src/user/entities'; -import { NotificationType } from 'src/notifications/enums/notification-types'; -import * as admin from 'firebase-admin'; - -// Mock firebase-admin -jest.mock('firebase-admin', () => ({ - initializeApp: jest.fn(), - credential: { - cert: jest.fn(), - }, - messaging: jest.fn(), -})); - -describe('FCMService', () => { - let service: FCMService; - let mock_user_repository: any; - let mock_messaging: any; - - const mock_user = { - id: 'user-123', - fcm_token: 'mock-fcm-token-123', - username: 'testuser', - }; - - beforeEach(async () => { - // Reset environment variables - process.env.FIREBASE_PRIVATE_KEY = 'mock-private-key\\nwith-newlines'; - process.env.FIREBASE_PROJECT_ID = 'mock-project-id'; - process.env.FIREBASE_CLIENT_EMAIL = 'mock@client.email'; - - mock_messaging = { - send: jest.fn().mockResolvedValue('mock-response-id'), - }; - - (admin.messaging as jest.Mock).mockReturnValue(mock_messaging); - - mock_user_repository = { - findOne: jest.fn().mockResolvedValue(mock_user), - update: jest.fn().mockResolvedValue({ affected: 1 }), - }; - - const module: TestingModule = await Test.createTestingModule({ - providers: [ - FCMService, - { - provide: getRepositoryToken(User), - useValue: mock_user_repository, - }, - ], - }).compile(); - - service = module.get(FCMService); - }); - - afterEach(() => { - jest.clearAllMocks(); - }); - - it('should be defined', () => { - expect(service).toBeDefined(); - }); - - describe('Constructor', () => { - it('should initialize Firebase Admin SDK with correct credentials', () => { - expect(admin.credential.cert).toHaveBeenCalledWith({ - projectId: 'mock-project-id', - clientEmail: 'mock@client.email', - privateKey: 'mock-private-key\nwith-newlines', - }); - expect(admin.initializeApp).toHaveBeenCalled(); - }); - }); - - describe('sendToDevice', () => { - it('should send message to device successfully', async () => { - const device_token = 'mock-device-token'; - const data = { key: 'value', type: 'LIKE' }; - const notification = { title: 'Test Title', body: 'Test Body' }; - - const result = await service.sendToDevice(device_token, data, notification); - - expect(mock_messaging.send).toHaveBeenCalledWith({ - token: device_token, - data: data, - notification: notification, - }); - expect(result).toBe('mock-response-id'); - }); - - it('should send message without notification object', async () => { - const device_token = 'mock-device-token'; - const data = { key: 'value' }; - - const result = await service.sendToDevice(device_token, data); - - expect(mock_messaging.send).toHaveBeenCalledWith({ - token: device_token, - data: data, - notification: undefined, - }); - expect(result).toBe('mock-response-id'); - }); - - it('should log error and throw when sending fails', async () => { - const error = new Error('FCM send failed'); - mock_messaging.send.mockRejectedValue(error); - - const logger_spy = jest.spyOn(service['logger'], 'error'); - - await expect(service.sendToDevice('device-token', { key: 'value' })).rejects.toThrow( - 'FCM send failed' - ); - - expect(logger_spy).toHaveBeenCalledWith('FCM Error: FCM send failed'); - }); - - it('should log successful send', async () => { - const logger_spy = jest.spyOn(service['logger'], 'log'); - - await service.sendToDevice('device-token', { key: 'value' }); - - expect(logger_spy).toHaveBeenCalledWith('FCM Sent: mock-response-id'); - }); - }); - - describe('addUserDeviceToken', () => { - it('should save FCM token for user successfully', async () => { - const user_id = 'user-123'; - const device_token = 'new-device-token'; - - await service.addUserDeviceToken(user_id, device_token); - - expect(mock_user_repository.update).toHaveBeenCalledWith(user_id, { - fcm_token: device_token, - }); - }); - - it('should log error and throw when saving token fails', async () => { - const error = new Error('Database error'); - mock_user_repository.update.mockRejectedValue(error); - - const logger_spy = jest.spyOn(service['logger'], 'error'); - - await expect(service.addUserDeviceToken('user-123', 'token')).rejects.toThrow( - 'Database error' - ); - - expect(logger_spy).toHaveBeenCalledWith( - 'Error saving FCM token for user user-123: Database error' - ); - }); - }); - - describe('removeUserDeviceToken', () => { - it('should remove FCM token for user successfully', async () => { - const user_id = 'user-123'; - - await service.removeUserDeviceToken(user_id); - - expect(mock_user_repository.update).toHaveBeenCalledWith(user_id, { - fcm_token: null, - }); - }); - - it('should log error and throw when removing token fails', async () => { - const error = new Error('Database error'); - mock_user_repository.update.mockRejectedValue(error); - - const logger_spy = jest.spyOn(service['logger'], 'error'); - - await expect(service.removeUserDeviceToken('user-123')).rejects.toThrow( - 'Database error' - ); - - expect(logger_spy).toHaveBeenCalledWith( - 'Error removing FCM token for user user-123: Database error' - ); - }); - }); - - describe('sendNotificationToUserDevice', () => { - it('should send LIKE notification successfully', async () => { - const payload = { - liker: { name: 'John Doe' }, - tweet_id: 'tweet-123', - }; - - const result = await service.sendNotificationToUserDevice( - 'user-123', - NotificationType.LIKE, - payload - ); - - expect(mock_user_repository.findOne).toHaveBeenCalledWith({ - where: { id: 'user-123' }, - select: ['fcm_token'], - }); - - expect(mock_messaging.send).toHaveBeenCalledWith({ - token: 'mock-fcm-token-123', - data: { - type: NotificationType.LIKE, - ...payload, - }, - notification: { - title: 'New LIKE', - body: 'John Doe liked your tweet', - }, - }); - - expect(result).toBe(true); - }); - - it('should send REPLY notification successfully', async () => { - const payload = { - replier: { name: 'Jane Smith' }, - tweet_id: 'tweet-456', - }; - - const result = await service.sendNotificationToUserDevice( - 'user-123', - NotificationType.REPLY, - payload - ); - - expect(mock_messaging.send).toHaveBeenCalledWith( - expect.objectContaining({ - notification: { - title: 'New REPLY', - body: 'Jane Smith replied to your tweet', - }, - }) - ); - - expect(result).toBe(true); - }); - - it('should send REPOST notification successfully', async () => { - const payload = { - reposter: { name: 'Bob Johnson' }, - }; - - await service.sendNotificationToUserDevice( - 'user-123', - NotificationType.REPOST, - payload - ); - - expect(mock_messaging.send).toHaveBeenCalledWith( - expect.objectContaining({ - notification: expect.objectContaining({ - body: 'Bob Johnson reposted your tweet', - }), - }) - ); - }); - - it('should send QUOTE notification successfully', async () => { - const payload = { - quoted_by: { name: 'Alice Brown' }, - }; - - await service.sendNotificationToUserDevice('user-123', NotificationType.QUOTE, payload); - - expect(mock_messaging.send).toHaveBeenCalledWith( - expect.objectContaining({ - notification: expect.objectContaining({ - body: 'Alice Brown quoted your tweet', - }), - }) - ); - }); - - it('should send MENTION notification successfully', async () => { - const payload = { - mentioned_by: { name: 'Charlie Wilson' }, - }; - - await service.sendNotificationToUserDevice( - 'user-123', - NotificationType.MENTION, - payload - ); - - expect(mock_messaging.send).toHaveBeenCalledWith( - expect.objectContaining({ - notification: expect.objectContaining({ - body: 'Charlie Wilson mentioned you in a tweet', - }), - }) - ); - }); - - it('should send MESSAGE notification successfully', async () => { - const payload = { - sender: { name: 'David Lee' }, - message: 'Hello!', - }; - - await service.sendNotificationToUserDevice( - 'user-123', - NotificationType.MESSAGE, - payload - ); - - expect(mock_messaging.send).toHaveBeenCalledWith( - expect.objectContaining({ - notification: expect.objectContaining({ - body: 'David Lee sent you a message', - }), - }) - ); - }); - - it('should send FOLLOW notification with follower_name', async () => { - const payload = { - follower_name: 'Emma Davis', - }; - - await service.sendNotificationToUserDevice( - 'user-123', - NotificationType.FOLLOW, - payload - ); - - expect(mock_messaging.send).toHaveBeenCalledWith( - expect.objectContaining({ - notification: expect.objectContaining({ - body: 'Emma Davis started following you', - }), - }) - ); - }); - - it('should use "Someone" as fallback username when user field not found', async () => { - const payload = { - // No user field - tweet_id: 'tweet-123', - }; - - await service.sendNotificationToUserDevice('user-123', NotificationType.LIKE, payload); - - expect(mock_messaging.send).toHaveBeenCalledWith( - expect.objectContaining({ - notification: expect.objectContaining({ - body: 'Someone liked your tweet', - }), - }) - ); - }); - - it('should return false and warn if user has no FCM token', async () => { - mock_user_repository.findOne.mockResolvedValue({ id: 'user-123', fcm_token: null }); - - const logger_spy = jest.spyOn(service['logger'], 'warn'); - - const result = await service.sendNotificationToUserDevice( - 'user-123', - NotificationType.LIKE, - {} - ); - - expect(logger_spy).toHaveBeenCalledWith('No FCM token found for user user-123'); - expect(mock_messaging.send).not.toHaveBeenCalled(); - expect(result).toBe(false); - }); - - it('should return false and warn if user not found', async () => { - mock_user_repository.findOne.mockResolvedValue(null); - - const logger_spy = jest.spyOn(service['logger'], 'warn'); - - const result = await service.sendNotificationToUserDevice( - 'user-999', - NotificationType.LIKE, - {} - ); - - expect(logger_spy).toHaveBeenCalledWith('No FCM token found for user user-999'); - expect(result).toBe(false); - }); - - it('should log success when notification sent', async () => { - const logger_spy = jest.spyOn(service['logger'], 'log'); - - await service.sendNotificationToUserDevice('user-123', NotificationType.LIKE, { - liker: { name: 'Test' }, - }); - - expect(logger_spy).toHaveBeenCalledWith('Notification sent via FCM to user user-123'); - }); - - it('should return false and log error if sending fails', async () => { - mock_messaging.send.mockRejectedValue(new Error('Send failed')); - - const logger_spy = jest.spyOn(service['logger'], 'error'); - - const result = await service.sendNotificationToUserDevice( - 'user-123', - NotificationType.LIKE, - { liker: { name: 'Test' } } - ); - - expect(logger_spy).toHaveBeenCalledWith( - 'Error sending FCM notification to user user-123: Send failed' - ); - expect(result).toBe(false); - }); - - it('should handle payload with nested user object structure', async () => { - const payload = { - liker: { - name: 'Complex User', - id: 'user-789', - username: 'complexuser', - }, - tweet_id: 'tweet-123', - }; - - await service.sendNotificationToUserDevice('user-123', NotificationType.LIKE, payload); - - expect(mock_messaging.send).toHaveBeenCalledWith( - expect.objectContaining({ - data: { - type: NotificationType.LIKE, - ...payload, - }, - }) - ); - }); - }); - - describe('extractUsername', () => { - it('should extract username from liker for LIKE notification', () => { - const payload = { liker: { name: 'John' } }; - const username = service['extractUsername'](payload, NotificationType.LIKE); - expect(username).toBe('John'); - }); - - it('should extract username from replier for REPLY notification', () => { - const payload = { replier: { name: 'Jane' } }; - const username = service['extractUsername'](payload, NotificationType.REPLY); - expect(username).toBe('Jane'); - }); - - it('should extract username from reposter for REPOST notification', () => { - const payload = { reposter: { name: 'Bob' } }; - const username = service['extractUsername'](payload, NotificationType.REPOST); - expect(username).toBe('Bob'); - }); - - it('should extract username from quoted_by for QUOTE notification', () => { - const payload = { quoted_by: { name: 'Alice' } }; - const username = service['extractUsername'](payload, NotificationType.QUOTE); - expect(username).toBe('Alice'); - }); - - it('should extract username from mentioned_by for MENTION notification', () => { - const payload = { mentioned_by: { name: 'Charlie' } }; - const username = service['extractUsername'](payload, NotificationType.MENTION); - expect(username).toBe('Charlie'); - }); - - it('should extract username from sender for MESSAGE notification', () => { - const payload = { sender: { name: 'David' } }; - const username = service['extractUsername'](payload, NotificationType.MESSAGE); - expect(username).toBe('David'); - }); - - it('should extract follower_name for FOLLOW notification', () => { - const payload = { follower_name: 'Emma' }; - const username = service['extractUsername'](payload, NotificationType.FOLLOW); - expect(username).toBe('Emma'); - }); - - it('should return "Someone" for FOLLOW when follower_name missing', () => { - const payload = {}; - const username = service['extractUsername'](payload, NotificationType.FOLLOW); - expect(username).toBe('Someone'); - }); - - it('should return "Someone" when user field is missing', () => { - const payload = {}; - const username = service['extractUsername'](payload, NotificationType.LIKE); - expect(username).toBe('Someone'); - }); - - it('should return "Someone" when user object has no name', () => { - const payload = { liker: { id: 'user-123' } }; - const username = service['extractUsername'](payload, NotificationType.LIKE); - expect(username).toBe('Someone'); - }); - }); - - describe('getNotificationBody', () => { - it('should generate correct body for all notification types', () => { - const test_cases = [ - { - type: NotificationType.LIKE, - payload: { liker: { name: 'John' } }, - expected: 'John liked your tweet', - }, - { - type: NotificationType.REPLY, - payload: { replier: { name: 'Jane' } }, - expected: 'Jane replied to your tweet', - }, - { - type: NotificationType.REPOST, - payload: { reposter: { name: 'Bob' } }, - expected: 'Bob reposted your tweet', - }, - { - type: NotificationType.QUOTE, - payload: { quoted_by: { name: 'Alice' } }, - expected: 'Alice quoted your tweet', - }, - { - type: NotificationType.FOLLOW, - payload: { follower_name: 'Charlie' }, - expected: 'Charlie started following you', - }, - { - type: NotificationType.MENTION, - payload: { mentioned_by: { name: 'David' } }, - expected: 'David mentioned you in a tweet', - }, - { - type: NotificationType.MESSAGE, - payload: { sender: { name: 'Emma' } }, - expected: 'Emma sent you a message', - }, - ]; - - test_cases.forEach(({ type, payload, expected }) => { - const body = service['getNotificationBody'](type, payload); - expect(body).toBe(expected); - }); - }); - - it('should return generic message for unknown notification type', () => { - const body = service['getNotificationBody']('UNKNOWN' as any, {}); - expect(body).toBe('You have a new notification'); - }); - }); -}); diff --git a/src/fcm/fcm.service.ts b/src/fcm/fcm.service.ts deleted file mode 100644 index eb86fe90..00000000 --- a/src/fcm/fcm.service.ts +++ /dev/null @@ -1,143 +0,0 @@ -import { Injectable, Logger } from '@nestjs/common'; -import { InjectRepository } from '@nestjs/typeorm'; -import * as admin from 'firebase-admin'; -import { NotificationType } from 'src/notifications/enums/notification-types'; -import { User } from 'src/user/entities'; -import { Repository } from 'typeorm'; - -@Injectable() -export class FCMService { - private logger = new Logger(FCMService.name); - - constructor(@InjectRepository(User) private readonly user_repository: Repository) { - // Initialize Firebase Admin SDK - const private_key = process.env.FIREBASE_PRIVATE_KEY?.replace(/\\n/g, '\n') || ''; - - admin.initializeApp({ - credential: admin.credential.cert({ - projectId: process.env.FIREBASE_PROJECT_ID, - clientEmail: process.env.FIREBASE_CLIENT_EMAIL, - privateKey: private_key, - }), - }); - } - - async sendToDevice( - device_token: string, - data: any, - notification?: { title: string; body: string } - ) { - try { - const message: admin.messaging.Message = { - token: device_token, - data: data, - notification: notification, - }; - - const response = await admin.messaging().send(message); - this.logger.log(`FCM Sent: ${response}`); - - return response; - } catch (err) { - this.logger.error(`FCM Error: ${err.message}`); - throw err; - } - } - - async addUserDeviceToken(user_id: string, device_token: string) { - // Implementation to store the device token associated with the user - try { - await this.user_repository.update(user_id, { fcm_token: device_token }); - } catch (error) { - this.logger.error(`Error saving FCM token for user ${user_id}: ${error.message}`); - throw error; - } - } - - async removeUserDeviceToken(user_id: string) { - // Implementation to remove the device token associated with the user - try { - await this.user_repository.update(user_id, { fcm_token: null }); - } catch (error) { - this.logger.error(`Error removing FCM token for user ${user_id}: ${error.message}`); - throw error; - } - } - - async sendNotificationToUserDevice( - user_id: string, - notification_type: NotificationType, - payload: any - ): Promise { - try { - const user = await this.user_repository.findOne({ - where: { id: user_id }, - select: ['fcm_token'], - }); - - if (!user?.fcm_token) { - this.logger.warn(`No FCM token found for user ${user_id}`); - return false; - } - - const notification = { - title: `New ${notification_type.toUpperCase()}`, - body: this.getNotificationBody(notification_type, payload), - }; - - const data = { - type: notification_type, - ...payload, - }; - - await this.sendToDevice(user.fcm_token, data, notification); - this.logger.log(`Notification sent via FCM to user ${user_id}`); - return true; - } catch (error) { - this.logger.error( - `Error sending FCM notification to user ${user_id}: ${error.message}` - ); - return false; - } - } - - private extractUsername(payload: any, type: NotificationType): string { - const user_field_map = { - [NotificationType.LIKE]: 'liker', - [NotificationType.REPLY]: 'replier', - [NotificationType.REPOST]: 'reposter', - [NotificationType.QUOTE]: 'quoted_by', - [NotificationType.MENTION]: 'mentioned_by', - [NotificationType.MESSAGE]: 'sender', - [NotificationType.FOLLOW]: null, - }; - - const user_field = user_field_map[type]; - - if (type === NotificationType.FOLLOW) { - return payload.follower_name || 'Someone'; - } - - if (user_field && payload[user_field]?.name) { - return payload[user_field].name; - } - - return 'Someone'; - } - - private getNotificationBody(type: NotificationType, payload: any): string { - const username = this.extractUsername(payload, type); - - const notification_body = { - [NotificationType.LIKE]: `${username} liked your tweet`, - [NotificationType.REPLY]: `${username} replied to your tweet`, - [NotificationType.REPOST]: `${username} reposted your tweet`, - [NotificationType.QUOTE]: `${username} quoted your tweet`, - [NotificationType.FOLLOW]: `${username} started following you`, - [NotificationType.MENTION]: `${username} mentioned you in a tweet`, - [NotificationType.MESSAGE]: `${username} sent you a message`, - }; - - return notification_body[type] || 'You have a new notification'; - } -} diff --git a/src/interceptor/response.interceptor.ts b/src/interceptor/response.interceptor.ts index 5453c514..be5872e8 100644 --- a/src/interceptor/response.interceptor.ts +++ b/src/interceptor/response.interceptor.ts @@ -12,7 +12,7 @@ export interface IResponse { @Injectable() export class ResponseInterceptor implements NestInterceptor> { - constructor(private reflector: Reflector) {} + constructor(private readonly reflector: Reflector) {} intercept(context: ExecutionContext, next: CallHandler): Observable> { const custom_message = this.reflector.get( diff --git a/src/messages/messages.controller.spec.ts b/src/messages/messages.controller.spec.ts index 62503555..db9853d5 100644 --- a/src/messages/messages.controller.spec.ts +++ b/src/messages/messages.controller.spec.ts @@ -230,4 +230,171 @@ describe('MessagesController', () => { expect(result.is_deleted).toBe(true); }); }); + + describe('socketDocs', () => { + it('should return socket documentation', async () => { + const result = await controller.socketDocs(); + + expect(result).toBeDefined(); + expect(typeof result).toBe('object'); + }); + }); + + describe('uploadMessageImage', () => { + beforeEach(() => { + messages_service.uploadMessageImage = jest.fn(); + }); + + it('should upload message image successfully', async () => { + const mock_file = { + fieldname: 'file', + originalname: 'test.jpg', + encoding: '7bit', + mimetype: 'image/jpeg', + buffer: Buffer.from('test'), + size: 1024, + } as Express.Multer.File; + + const mock_result = { + image_url: 'https://storage.azure.com/images/test.jpg', + }; + + messages_service.uploadMessageImage.mockResolvedValue(mock_result); + + const result = await controller.uploadMessageImage(mock_file, mock_user_id); + + expect(messages_service.uploadMessageImage).toHaveBeenCalledWith( + mock_user_id, + mock_file + ); + expect(result).toEqual(mock_result); + }); + + it('should handle upload errors', async () => { + const mock_file = { + fieldname: 'file', + originalname: 'test.jpg', + encoding: '7bit', + mimetype: 'image/jpeg', + buffer: Buffer.from('test'), + size: 1024, + } as Express.Multer.File; + + messages_service.uploadMessageImage.mockRejectedValue(new Error('Upload failed')); + + await expect(controller.uploadMessageImage(mock_file, mock_user_id)).rejects.toThrow( + 'Upload failed' + ); + }); + }); + + describe('getMessageReactions', () => { + beforeEach(() => { + messages_service.getMessageReactions = jest.fn(); + }); + + it('should get message reactions successfully', async () => { + const mock_reactions = [ + { + emoji: 'â¤ī¸', + count: 2, + users: [ + { + id: 'user-1', + username: 'user1', + name: 'User One', + avatar_url: 'avatar1.jpg', + }, + { + id: 'user-2', + username: 'user2', + name: 'User Two', + avatar_url: 'avatar2.jpg', + }, + ], + user_reacted: true, + }, + ]; + + messages_service.getMessageReactions.mockResolvedValue(mock_reactions as any); + + const result = await controller.getMessageReactions( + mock_chat_id, + mock_message_id, + mock_user_id + ); + + expect(messages_service.getMessageReactions).toHaveBeenCalledWith( + mock_user_id, + mock_chat_id, + mock_message_id + ); + expect(result).toEqual(mock_reactions); + }); + + it('should return empty array when no reactions', async () => { + messages_service.getMessageReactions.mockResolvedValue([]); + + const result = await controller.getMessageReactions( + mock_chat_id, + mock_message_id, + mock_user_id + ); + + expect(result).toEqual([]); + }); + }); + + describe('uploadVoiceNote', () => { + beforeEach(() => { + messages_service.uploadVoiceNote = jest.fn(); + }); + + it('should upload voice note successfully', async () => { + const mock_file = { + fieldname: 'file', + originalname: 'voice.mp3', + encoding: '7bit', + mimetype: 'audio/mpeg', + buffer: Buffer.from('audio data'), + size: 5000, + } as Express.Multer.File; + + const mock_body = { duration: '45' }; + const mock_result = { + voice_note_url: 'https://storage.azure.com/voices/voice.mp3', + duration: '45', + }; + + messages_service.uploadVoiceNote.mockResolvedValue(mock_result); + + const result = await controller.uploadVoiceNote(mock_file, mock_body, mock_user_id); + + expect(messages_service.uploadVoiceNote).toHaveBeenCalledWith( + mock_user_id, + mock_file, + '45' + ); + expect(result).toEqual(mock_result); + }); + + it('should handle voice note upload errors', async () => { + const mock_file = { + fieldname: 'file', + originalname: 'voice.mp3', + encoding: '7bit', + mimetype: 'audio/mpeg', + buffer: Buffer.from('audio data'), + size: 5000, + } as Express.Multer.File; + + const mock_body = { duration: '30' }; + + messages_service.uploadVoiceNote.mockRejectedValue(new Error('File too large')); + + await expect( + controller.uploadVoiceNote(mock_file, mock_body, mock_user_id) + ).rejects.toThrow('File too large'); + }); + }); }); diff --git a/src/messages/messages.controller.ts b/src/messages/messages.controller.ts index 4e2127d3..70c2291e 100644 --- a/src/messages/messages.controller.ts +++ b/src/messages/messages.controller.ts @@ -24,7 +24,6 @@ import { } from '@nestjs/swagger'; import { ApiBadRequestErrorResponse, - ApiConflictErrorResponse, ApiForbiddenErrorResponse, ApiNotFoundErrorResponse, ApiUnauthorizedErrorResponse, @@ -32,13 +31,7 @@ import { import { GetUserId } from '../decorators/get-userId.decorator'; import { ResponseMessage } from '../decorators/response-message.decorator'; import { ERROR_MESSAGES, SUCCESS_MESSAGES } from '../constants/swagger-messages'; -import { - GetMessagesQueryDto, - SendMessageDto, - UpdateMessageDto, - UploadMessageImageDto, - UploadVoiceNoteDto, -} from './dto'; +import { GetMessagesQueryDto, SendMessageDto, UpdateMessageDto } from './dto'; import { delete_message_swagger, get_message_reactions_swagger, diff --git a/src/messages/messages.gateway.spec.ts b/src/messages/messages.gateway.spec.ts index 6117d5f9..383b9549 100644 --- a/src/messages/messages.gateway.spec.ts +++ b/src/messages/messages.gateway.spec.ts @@ -7,6 +7,7 @@ import { Server, Socket } from 'socket.io'; import { WsJwtGuard } from 'src/auth/guards/ws-jwt.guard'; import { ChatRepository } from 'src/chat/chat.repository'; import { PaginationService } from 'src/shared/services/pagination/pagination.service'; +import { MessageType } from './entities/message.entity'; describe('MessagesGateway', () => { let gateway: MessagesGateway; @@ -409,6 +410,99 @@ describe('MessagesGateway', () => { expect(result.event).toBe('error'); expect((result.data as any).message).toBe('Chat not found'); }); + + it('should validate voice message fields', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + const result = await gateway.handleSendMessage(mock_client, { + chat_id: mock_chat_id, + message: { + content: '', + message_type: MessageType.VOICE, + voice_note_url: '', // Missing URL + voice_note_duration: '30', + } as any, + }); + + expect(result.event).toBe('error'); + expect((result.data as any).message).toContain('voice_note_url'); + }); + + it('should validate voice message duration format', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + const result = await gateway.handleSendMessage(mock_client, { + chat_id: mock_chat_id, + message: { + content: '', + message_type: MessageType.VOICE, + voice_note_url: 'https://example.com/voice.mp3', + voice_note_duration: 'invalid', // Invalid format + } as any, + }); + + expect(result.event).toBe('error'); + expect((result.data as any).message).toContain('MM:SS format'); + }); + + it('should handle first message scenario', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + const mock_chat = { + id: mock_chat_id, + user1_id: mock_user_id, + user2_id: 'user-999', + }; + + const mock_message = { + id: mock_message_id, + content: 'First message', + sender_id: mock_user_id, + recipient_id: 'user-999', + chat_id: mock_chat_id, + }; + + messages_service.validateChatParticipation.mockResolvedValue({ + chat: mock_chat, + participant_id: 'user-999', + } as any); + messages_service.sendMessage.mockResolvedValue(mock_message as any); + jest.spyOn(gateway as any, 'isUserInChatRoom').mockResolvedValue(false); + const emit_to_user_spy = jest + .spyOn(gateway as any, 'emitToUser') + .mockImplementation(() => {}); + + await gateway.handleSendMessage(mock_client, { + chat_id: mock_chat_id, + message: { + content: 'First message', + is_first_message: true, + } as any, + }); + + // Should emit first_message event + expect(emit_to_user_spy).toHaveBeenCalledWith( + mock_user_id, + 'first_message', + expect.any(Object), + mock_client.id + ); + expect(emit_to_user_spy).toHaveBeenCalledWith( + 'user-999', + 'first_message', + expect.any(Object), + mock_client.id + ); + }); }); describe('handleUpdateMessage', () => { @@ -529,6 +623,310 @@ describe('MessagesGateway', () => { }); }); + describe('handleTypingStart', () => { + it('should emit typing_start event to other user in chat', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + to: jest.fn().mockReturnThis(), + emit: jest.fn(), + } as any; + + const mock_chat = { + id: mock_chat_id, + user1_id: mock_user_id, + user2_id: 'user-999', + }; + + messages_service.validateChatParticipation.mockResolvedValue({ + chat: mock_chat, + participant_id: 'user-999', + } as any); + + const result = await gateway.handleTypingStart(mock_client, { + chat_id: mock_chat_id, + }); + + expect(messages_service.validateChatParticipation).toHaveBeenCalledWith( + mock_user_id, + mock_chat_id + ); + expect(mock_client.to).toHaveBeenCalledWith(mock_chat_id); + expect(mock_client.emit).toHaveBeenCalledWith('user_typing', { + chat_id: mock_chat_id, + user_id: mock_user_id, + }); + expect(result.event).toBe('typing_started'); + }); + + it('should return error if validation fails', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + messages_service.validateChatParticipation.mockRejectedValue( + new Error('Chat not found') + ); + + const result = await gateway.handleTypingStart(mock_client, { + chat_id: mock_chat_id, + }); + + expect(result.event).toBe('error'); + expect((result.data as any).message).toBe('Chat not found'); + }); + }); + + describe('handleTypingStop', () => { + it('should emit typing_stop event to other user in chat', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + to: jest.fn().mockReturnThis(), + emit: jest.fn(), + } as any; + + const mock_chat = { + id: mock_chat_id, + user1_id: mock_user_id, + user2_id: 'user-999', + }; + + messages_service.validateChatParticipation.mockResolvedValue({ + chat: mock_chat, + participant_id: 'user-999', + } as any); + + const result = await gateway.handleTypingStop(mock_client, { + chat_id: mock_chat_id, + }); + + expect(messages_service.validateChatParticipation).toHaveBeenCalledWith( + mock_user_id, + mock_chat_id + ); + expect(mock_client.to).toHaveBeenCalledWith(mock_chat_id); + expect(mock_client.emit).toHaveBeenCalledWith('user_stopped_typing', { + chat_id: mock_chat_id, + user_id: mock_user_id, + }); + expect(result.event).toBe('typing_stopped'); + }); + + it('should return error if validation fails', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + messages_service.validateChatParticipation.mockRejectedValue( + new Error('Not authorized') + ); + + const result = await gateway.handleTypingStop(mock_client, { + chat_id: mock_chat_id, + }); + + expect(result.event).toBe('error'); + }); + }); + + describe('handleGetMessages', () => { + it('should return messages for a chat', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + const mock_messages = { + sender: { + id: 'user-999', + username: 'user2', + name: 'User Two', + avatar_url: 'avatar2.jpg', + }, + messages: [{ id: 'msg-1', content: 'Hello' }], + next_cursor: 'cursor-123', + has_more: true, + }; + + messages_service.getMessages.mockResolvedValue(mock_messages as any); + + const result = await gateway.handleGetMessages(mock_client, { + chat_id: mock_chat_id, + limit: 50, + }); + + expect(messages_service.getMessages).toHaveBeenCalledWith(mock_user_id, mock_chat_id, { + limit: 50, + cursor: undefined, + }); + expect(result.event).toBe('messages_retrieved'); + expect((result.data as any).chat_id).toBe(mock_chat_id); + expect((result as any).pagination.next_cursor).toBe('cursor-123'); + expect((result as any).pagination.has_more).toBe(true); + }); + + it('should handle cursor pagination', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + messages_service.getMessages.mockResolvedValue({ messages: [] } as any); + + await gateway.handleGetMessages(mock_client, { + chat_id: mock_chat_id, + limit: 20, + cursor: 'cursor-abc', + }); + + expect(messages_service.getMessages).toHaveBeenCalledWith(mock_user_id, mock_chat_id, { + limit: 20, + cursor: 'cursor-abc', + }); + }); + + it('should return error if get messages fails', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + messages_service.getMessages.mockRejectedValue(new Error('Database error')); + + const result = await gateway.handleGetMessages(mock_client, { + chat_id: mock_chat_id, + limit: 50, + }); + + expect(result.event).toBe('error'); + expect((result.data as any).message).toBe('Database error'); + }); + }); + + describe('handleAddReaction', () => { + it('should add reaction to message successfully', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + const mock_reaction = { + id: 'reaction-1', + message_id: mock_message_id, + user_id: mock_user_id, + emoji: 'â¤ī¸', + created_at: new Date(), + }; + + messages_service.addReaction = jest.fn().mockResolvedValue(mock_reaction); + jest.spyOn(gateway.server, 'to').mockReturnThis(); + jest.spyOn(gateway.server, 'emit'); + + const result = await gateway.handleAddReaction(mock_client, { + chat_id: mock_chat_id, + message_id: mock_message_id, + emoji: 'â¤ī¸', + }); + + expect(messages_service.addReaction).toHaveBeenCalledWith( + mock_user_id, + mock_chat_id, + mock_message_id, + { emoji: 'â¤ī¸' } + ); + expect(gateway.server.to).toHaveBeenCalledWith(mock_chat_id); + expect(gateway.server.emit).toHaveBeenCalledWith('reaction_added', { + chat_id: mock_chat_id, + message_id: mock_message_id, + user_id: mock_user_id, + emoji: 'â¤ī¸', + created_at: mock_reaction.created_at, + }); + expect(result.event).toBe('reaction_added'); + }); + + it('should return error if add reaction fails', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + messages_service.addReaction = jest + .fn() + .mockRejectedValue(new Error('Reaction already exists')); + + const result = await gateway.handleAddReaction(mock_client, { + chat_id: mock_chat_id, + message_id: mock_message_id, + emoji: '👍', + }); + + expect(result.event).toBe('error'); + expect((result.data as any).message).toBe('Reaction already exists'); + }); + }); + + describe('handleRemoveReaction', () => { + it('should remove reaction from message successfully', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + const mock_response = { + message: 'Reaction removed successfully', + }; + + messages_service.removeReaction = jest.fn().mockResolvedValue(mock_response); + jest.spyOn(gateway.server, 'to').mockReturnThis(); + jest.spyOn(gateway.server, 'emit'); + + const result = await gateway.handleRemoveReaction(mock_client, { + chat_id: mock_chat_id, + message_id: mock_message_id, + emoji: 'â¤ī¸', + }); + + expect(messages_service.removeReaction).toHaveBeenCalledWith( + mock_user_id, + mock_chat_id, + mock_message_id, + { emoji: 'â¤ī¸' } + ); + expect(gateway.server.to).toHaveBeenCalledWith(mock_chat_id); + expect(gateway.server.emit).toHaveBeenCalledWith('reaction_removed', { + chat_id: mock_chat_id, + message_id: mock_message_id, + user_id: mock_user_id, + emoji: 'â¤ī¸', + }); + expect(result.event).toBe('reaction_removed'); + }); + + it('should return error if remove reaction fails', async () => { + const mock_client = { + id: 'socket-123', + data: { user: { id: mock_user_id } }, + } as any; + + messages_service.removeReaction = jest + .fn() + .mockRejectedValue(new Error('Reaction not found')); + + const result = await gateway.handleRemoveReaction(mock_client, { + chat_id: mock_chat_id, + message_id: mock_message_id, + emoji: '👍', + }); + + expect(result.event).toBe('error'); + expect((result.data as any).message).toBe('Reaction not found'); + }); + }); + describe('gateway initialization', () => { it('should be defined', () => { expect(gateway).toBeDefined(); diff --git a/src/messages/messages.gateway.ts b/src/messages/messages.gateway.ts index 500975e5..22d9ba2e 100644 --- a/src/messages/messages.gateway.ts +++ b/src/messages/messages.gateway.ts @@ -5,14 +5,13 @@ import { ChatRepository } from 'src/chat/chat.repository'; import { GetMessagesQueryDto, SendMessageDto, UpdateMessageDto } from './dto'; import { MessageType } from './entities/message.entity'; import { PaginationService } from 'src/shared/services/pagination/pagination.service'; -import { path } from '@ffmpeg-installer/ffmpeg'; import { MESSAGE_CONTENT_LENGTH } from 'src/constants/variables'; @Injectable() export class MessagesGateway { server: Server; // Store active connections: user_id -> socket_id[] - private userSockets = new Map>(); + private readonly userSockets = new Map>(); constructor( private readonly messages_service: MessagesService, diff --git a/src/messages/messages.module.ts b/src/messages/messages.module.ts index 99506e33..22be610f 100644 --- a/src/messages/messages.module.ts +++ b/src/messages/messages.module.ts @@ -11,7 +11,7 @@ import { MessageReaction } from './entities/message-reaction.entity'; import { Chat } from 'src/chat/entities/chat.entity'; import { MessagesGateway } from './messages.gateway'; import { ChatModule } from 'src/chat/chat.module'; -import { FcmModule } from 'src/fcm/fcm.module'; +import { FcmModule } from 'src/expo/expo.module'; import { BackgroundJobsModule } from 'src/background-jobs'; import { AzureStorageModule } from 'src/azure-storage/azure-storage.module'; diff --git a/src/messages/messages.repository.ts b/src/messages/messages.repository.ts index 649acaa2..335a5f77 100644 --- a/src/messages/messages.repository.ts +++ b/src/messages/messages.repository.ts @@ -1,6 +1,6 @@ import { Injectable, InternalServerErrorException } from '@nestjs/common'; -import { DataSource, LessThan, Not, Repository } from 'typeorm'; -import { GetMessagesQueryDto, SendMessageDto, UpdateMessageDto } from './dto'; +import { DataSource, Repository } from 'typeorm'; +import { GetMessagesQueryDto, SendMessageDto } from './dto'; import { Message, MessageType } from './entities/message.entity'; import { ERROR_MESSAGES } from 'src/constants/swagger-messages'; import { PaginationService } from '../shared/services/pagination/pagination.service'; @@ -10,9 +10,9 @@ import { Chat } from 'src/chat/entities/chat.entity'; @Injectable() export class MessageRepository extends Repository { constructor( - private data_source: DataSource, - private pagination_service: PaginationService, - private encryption_service: EncryptionService + private readonly data_source: DataSource, + private readonly pagination_service: PaginationService, + private readonly encryption_service: EncryptionService ) { super(Message, data_source.createEntityManager()); } diff --git a/src/messages/messages.service.spec.ts b/src/messages/messages.service.spec.ts index f4c8f23b..a249aa89 100644 --- a/src/messages/messages.service.spec.ts +++ b/src/messages/messages.service.spec.ts @@ -8,7 +8,7 @@ import { PaginationService } from 'src/shared/services/pagination/pagination.ser import { BadRequestException, ForbiddenException, NotFoundException } from '@nestjs/common'; import { ERROR_MESSAGES } from '../constants/swagger-messages'; import { MessageType } from './entities/message.entity'; -import { FCMService } from '../fcm/fcm.service'; +import { FCMService } from '../expo/expo.service'; import { MessagesGateway } from './messages.gateway'; import { MessageJobService } from '../background-jobs/notifications/message/message.service'; import { EncryptionService } from '../shared/services/encryption/encryption.service'; @@ -119,6 +119,8 @@ describe('MessagesService', () => { useValue: { uploadFromUrl: jest.fn(), deleteBlob: jest.fn(), + generateFileName: jest.fn(), + uploadFile: jest.fn(), }, }, { @@ -666,4 +668,157 @@ describe('MessagesService', () => { expect((result[0] as any).user_reacted).toBe(true); }); }); + + describe('uploadVoiceNote', () => { + const mock_duration = '30'; + + it('should throw BadRequestException if file not provided', async () => { + await expect( + service.uploadVoiceNote(mock_user_id, null as any, mock_duration) + ).rejects.toThrow(BadRequestException); + }); + + it('should throw BadRequestException if file buffer is missing', async () => { + const file_without_buffer = { + fieldname: 'file', + originalname: 'voice.mp3', + encoding: '7bit', + mimetype: 'audio/mpeg', + size: 1024, + } as any; + + await expect( + service.uploadVoiceNote(mock_user_id, file_without_buffer, mock_duration) + ).rejects.toThrow(BadRequestException); + }); + + it('should throw BadRequestException for invalid voice file format', async () => { + const invalid_file = { + fieldname: 'file', + originalname: 'test.txt', + encoding: '7bit', + mimetype: 'text/plain', + buffer: Buffer.from('test'), + size: 1024, + } as any; + + await expect( + service.uploadVoiceNote(mock_user_id, invalid_file, mock_duration) + ).rejects.toThrow(BadRequestException); + }); + + it('should throw BadRequestException if voice file too large', async () => { + const large_file = { + fieldname: 'file', + originalname: 'voice.mp3', + encoding: '7bit', + mimetype: 'audio/mpeg', + buffer: Buffer.from('test'), + size: 100 * 1024 * 1024, // 100MB + } as any; + + await expect( + service.uploadVoiceNote(mock_user_id, large_file, mock_duration) + ).rejects.toThrow(BadRequestException); + }); + + it('should upload voice note successfully', async () => { + const valid_file = { + fieldname: 'file', + originalname: 'voice.mp3', + encoding: '7bit', + mimetype: 'audio/mpeg', + buffer: Buffer.from('test audio data'), + size: 1024 * 500, // 500KB + } as any; + + const mock_voice_url = 'https://storage.azure.com/voices/voice-123.mp3'; + const azure_service = (service as any).azure_storage_service; + jest.spyOn(azure_service, 'generateFileName').mockReturnValue('voice-123.mp3'); + jest.spyOn(azure_service, 'uploadFile').mockResolvedValue(mock_voice_url); + + const result = await service.uploadVoiceNote(mock_user_id, valid_file, mock_duration); + + expect(result.voice_note_url).toBe(mock_voice_url); + expect(result.duration).toBe(mock_duration); + expect(azure_service.generateFileName).toHaveBeenCalledWith(mock_user_id, 'voice.mp3'); + }); + + it('should throw BadRequestException if upload fails', async () => { + const valid_file = { + fieldname: 'file', + originalname: 'voice.mp3', + encoding: '7bit', + mimetype: 'audio/mpeg', + buffer: Buffer.from('test audio data'), + size: 1024 * 500, + } as any; + + const azure_service = (service as any).azure_storage_service; + jest.spyOn(azure_service, 'generateFileName').mockReturnValue('voice-123.mp3'); + jest.spyOn(azure_service, 'uploadFile').mockRejectedValue(new Error('Upload failed')); + + await expect( + service.uploadVoiceNote(mock_user_id, valid_file, mock_duration) + ).rejects.toThrow(BadRequestException); + }); + }); + + describe('sendVoiceMessage', () => { + it('should send voice message successfully', async () => { + const voice_url = 'https://storage.azure.com/voices/voice-123.mp3'; + const duration = '45'; + + chat_repository.findOne.mockResolvedValue(mock_chat as any); + message_repository.createMessage.mockResolvedValue({ + ...mock_message, + message_type: MessageType.VOICE, + voice_note_url: voice_url, + voice_note_duration: duration, + } as any); + + const result = await service.sendVoiceMessage( + mock_user_id, + mock_chat_id, + voice_url, + duration + ); + + expect(message_repository.createMessage).toHaveBeenCalledWith( + mock_user_id, + mock_chat_id, + expect.objectContaining({ + content: '', + message_type: MessageType.VOICE, + voice_note_url: voice_url, + voice_note_duration: duration, + is_first_message: false, + }), + false + ); + expect(result.message_type).toBe(MessageType.VOICE); + }); + + it('should send voice message as first message', async () => { + const voice_url = 'https://storage.azure.com/voices/voice-123.mp3'; + const duration = '30'; + + chat_repository.findOne.mockResolvedValue(mock_chat as any); + message_repository.createMessage.mockResolvedValue({ + ...mock_message, + message_type: MessageType.VOICE, + } as any); + + await service.sendVoiceMessage(mock_user_id, mock_chat_id, voice_url, duration, true); + + expect(message_repository.createMessage).toHaveBeenCalledWith( + mock_user_id, + mock_chat_id, + expect.objectContaining({ + is_first_message: true, + }), + false + ); + }); + }); }); diff --git a/src/messages/messages.service.ts b/src/messages/messages.service.ts index d099a7de..a80443fb 100644 --- a/src/messages/messages.service.ts +++ b/src/messages/messages.service.ts @@ -21,7 +21,7 @@ import { MessageType } from './entities/message.entity'; import { ChatRepository } from 'src/chat/chat.repository'; import { PaginationService } from 'src/shared/services/pagination/pagination.service'; import { EncryptionService } from 'src/shared/services/encryption/encryption.service'; -import { FCMService } from 'src/fcm/fcm.service'; +import { FCMService } from 'src/expo/expo.service'; import { NotificationType } from 'src/notifications/enums/notification-types'; import { MessagesGateway } from './messages.gateway'; import { MessageJobService } from 'src/background-jobs/notifications/message/message.service'; @@ -31,16 +31,14 @@ import { ALLOWED_IMAGE_MIME_TYPES, ALLOWED_VOICE_MIME_TYPES, MAX_IMAGE_FILE_SIZE, - MAX_VOICE_DURATION, MAX_VOICE_FILE_SIZE, - MIN_VOICE_DURATION, } from 'src/constants/variables'; import { MessageReactionRepository } from './message-reaction.repository'; @Injectable() export class MessagesService { - private message_images_container: string; - private message_voices_container: string; + private readonly message_images_container: string; + private readonly message_voices_container: string; constructor( private readonly message_repository: MessageRepository, @@ -149,6 +147,7 @@ export class MessagesService { sender: { name: sender.name, username: sender.username, + chat_id, }, } ); diff --git a/src/messages/messages.swagger.ts b/src/messages/messages.swagger.ts index 177af39a..6659055c 100644 --- a/src/messages/messages.swagger.ts +++ b/src/messages/messages.swagger.ts @@ -411,19 +411,9 @@ Send a new message in a chat. Supports text, reply, image, and voice messages. "content": "Hello, how are you?", "reply_to_message_id": "msg_789def-012abc-345ghi", "message_type": "text", - image_url: null, - } -} -\`\`\` - -**Emit (Voice Message):** -\`\`\`json -{ - "chat_id": "chat_123abc-def456-789ghi", - "message": { - "message_type": "voice", - "voice_note_url": "https://yapperdev.blob.core.windows.net/message-voices/...", - "voice_note_duration": "4:33" + "is_first_message": false, + "reply_to_message_id": null, + "image_url": null } } \`\`\` diff --git a/src/migrations/1765344529881-voice_note.ts b/src/migrations/1765344529881-voice_note.ts deleted file mode 100644 index bdf25c0e..00000000 --- a/src/migrations/1765344529881-voice_note.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { MigrationInterface, QueryRunner } from 'typeorm'; - -export class VoiceNote1765344529881 implements MigrationInterface { - name = 'VoiceNote1765344529881'; - - public async up(query_runner: QueryRunner): Promise { - await query_runner.query(`ALTER TABLE "messages" ADD "voice_note_url" text`); - await query_runner.query(`ALTER TABLE "messages" ADD "voice_note_duration" text`); - await query_runner.query( - `ALTER TYPE "public"."messages_message_type_enum" RENAME TO "messages_message_type_enum_old"` - ); - await query_runner.query( - `CREATE TYPE "public"."messages_message_type_enum" AS ENUM('text', 'reply', 'voice')` - ); - await query_runner.query(`ALTER TABLE "messages" ALTER COLUMN "message_type" DROP DEFAULT`); - await query_runner.query( - `ALTER TABLE "messages" ALTER COLUMN "message_type" TYPE "public"."messages_message_type_enum" USING "message_type"::"text"::"public"."messages_message_type_enum"` - ); - await query_runner.query( - `ALTER TABLE "messages" ALTER COLUMN "message_type" SET DEFAULT 'text'` - ); - await query_runner.query(`DROP TYPE "public"."messages_message_type_enum_old"`); - } - - public async down(query_runner: QueryRunner): Promise { - await query_runner.query( - `CREATE TYPE "public"."messages_message_type_enum_old" AS ENUM('text', 'reply')` - ); - await query_runner.query(`ALTER TABLE "messages" ALTER COLUMN "message_type" DROP DEFAULT`); - await query_runner.query( - `ALTER TABLE "messages" ALTER COLUMN "message_type" TYPE "public"."messages_message_type_enum_old" USING "message_type"::"text"::"public"."messages_message_type_enum_old"` - ); - await query_runner.query( - `ALTER TABLE "messages" ALTER COLUMN "message_type" SET DEFAULT 'text'` - ); - await query_runner.query(`DROP TYPE "public"."messages_message_type_enum"`); - await query_runner.query( - `ALTER TYPE "public"."messages_message_type_enum_old" RENAME TO "messages_message_type_enum"` - ); - await query_runner.query(`ALTER TABLE "messages" DROP COLUMN "voice_note_duration"`); - await query_runner.query(`ALTER TABLE "messages" DROP COLUMN "voice_note_url"`); - } -} diff --git a/src/notifications/dto/base-notification.dto.ts b/src/notifications/dto/base-notification.dto.ts deleted file mode 100644 index dc6d63f5..00000000 --- a/src/notifications/dto/base-notification.dto.ts +++ /dev/null @@ -1,54 +0,0 @@ -// import { ApiProperty, ApiPropertyOptional } from '@nestjs/swagger'; -// import { NotificationType } from '../enums/notification-types'; - -// export class BaseNotificationDto { -// @ApiProperty({ -// description: 'Type of the notification (e.g., LIKE, COMMENT, FOLLOW)', -// example: NotificationType.LIKE, -// enum: NotificationType, -// }) -// type: string; - -// @ApiProperty({ -// description: 'Creation timestamp of this notification', -// example: '2025-10-15T18:30:00.000Z', -// type: String, -// format: 'date-time', -// }) -// created_at: string; - -// @ApiProperty({ -// description: 'List of trigger IDs associated with this notification (UUIDv4)', -// example: ['d290f1ee-6c54-4b01-90e6-d701748f0851', 'eac8b334-70b9-4de4-8019-3946eae8b1e5'], -// type: [String], -// }) -// trigger_ids: string[]; - -// @ApiProperty({ -// description: 'List of user IDs associated with this notification (UUIDv4)', -// example: ['d290f1ee-6c54-4b01-90e6-d701748f0851', 'eac8b334-70b9-4de4-8019-3946eae8b1e5'], -// type: [String], -// }) -// user_ids: string[]; - -// @ApiProperty({ -// description: 'Human-readable message for the notification', -// example: 'Ahmed and 3 others liked your post', -// type: String, -// }) -// content: string; - -// @ApiProperty({ -// description: 'Flag to know whether notification is seen or not', -// example: false, -// type: Boolean, -// }) -// seen: boolean = false; - -// @ApiPropertyOptional({ -// description: 'Message sent in case the trigger is a message from another user', -// example: 'Hello!', -// type: String, -// }) -// chatMessage?: string; -// } diff --git a/src/notifications/dto/follow-notification.dto.ts b/src/notifications/dto/follow-notification.dto.ts index 4715b9c3..495b86c4 100644 --- a/src/notifications/dto/follow-notification.dto.ts +++ b/src/notifications/dto/follow-notification.dto.ts @@ -3,6 +3,13 @@ import { NotificationType } from '../enums/notification-types'; import { User } from 'src/user/entities'; export class FollowNotificationDto { + @ApiProperty({ + description: 'Notification ID', + example: '507f1f77bcf86cd799439011', + type: String, + }) + id: string; + @ApiProperty({ description: 'Notification type', example: NotificationType.FOLLOW, diff --git a/src/notifications/dto/like-notification.dto.ts b/src/notifications/dto/like-notification.dto.ts index a785c6c0..a43d856b 100644 --- a/src/notifications/dto/like-notification.dto.ts +++ b/src/notifications/dto/like-notification.dto.ts @@ -4,6 +4,13 @@ import { User } from 'src/user/entities'; import { Tweet } from 'src/tweets/entities'; export class LikeNotificationDto { + @ApiProperty({ + description: 'Notification ID', + example: '507f1f77bcf86cd799439011', + type: String, + }) + id: string; + @ApiProperty({ description: 'Notification type', example: NotificationType.LIKE, diff --git a/src/notifications/dto/mention-notification.dto.ts b/src/notifications/dto/mention-notification.dto.ts index 0c7ca942..ef13441d 100644 --- a/src/notifications/dto/mention-notification.dto.ts +++ b/src/notifications/dto/mention-notification.dto.ts @@ -3,6 +3,13 @@ import { NotificationType } from '../enums/notification-types'; import { TweetResponseDTO, UserResponseDTO } from 'src/tweets/dto'; export class MentionNotificationDto { + @ApiProperty({ + description: 'Notification ID', + example: '507f1f77bcf86cd799439011', + type: String, + }) + id: string; + @ApiProperty({ example: NotificationType.MENTION, enum: [NotificationType.MENTION], diff --git a/src/notifications/dto/message-notification.dto.ts b/src/notifications/dto/message-notification.dto.ts new file mode 100644 index 00000000..6d0279a6 --- /dev/null +++ b/src/notifications/dto/message-notification.dto.ts @@ -0,0 +1,43 @@ +import { ApiProperty } from '@nestjs/swagger'; +import { NotificationType } from '../enums/notification-types'; +import { UserResponseDTO } from 'src/tweets/dto'; + +export class MessageNotificationDto { + @ApiProperty({ + description: 'Notification ID', + example: '507f1f77bcf86cd799439011', + type: String, + }) + id: string; + + @ApiProperty({ + example: NotificationType.MESSAGE, + enum: [NotificationType.MESSAGE], + description: 'Type of notification', + }) + type: NotificationType.MESSAGE; + + @ApiProperty({ + example: '2025-11-29T08:45:00.000Z', + description: 'Timestamp when the notification was created', + }) + created_at: Date; + + @ApiProperty({ + description: 'User who sent the message', + type: UserResponseDTO, + }) + sender: UserResponseDTO; + + @ApiProperty({ + example: '123e4567-e89b-12d3-a456-426614174000', + description: 'ID of the message', + }) + message_id: string; + + @ApiProperty({ + example: '123e4567-e89b-12d3-a456-426614174001', + description: 'ID of the chat', + }) + chat_id: string; +} diff --git a/src/notifications/dto/notifications-response.dto.ts b/src/notifications/dto/notifications-response.dto.ts index fbb79f38..5903757e 100644 --- a/src/notifications/dto/notifications-response.dto.ts +++ b/src/notifications/dto/notifications-response.dto.ts @@ -5,6 +5,7 @@ import { ReplyNotificationDto } from './reply-notification.dto'; import { RepostNotificationDto } from './repost-notification.dto'; import { QuoteNotificationDto } from './quote-notification.dto'; import { MentionNotificationDto } from './mention-notification.dto'; +import { MessageNotificationDto } from './message-notification.dto'; export type NotificationDto = | FollowNotificationDto @@ -12,7 +13,8 @@ export type NotificationDto = | ReplyNotificationDto | RepostNotificationDto | QuoteNotificationDto - | MentionNotificationDto; + | MentionNotificationDto + | MessageNotificationDto; export class NotificationsResponseDto { @ApiProperty({ @@ -26,10 +28,12 @@ export class NotificationsResponseDto { { $ref: '#/components/schemas/RepostNotificationDto' }, { $ref: '#/components/schemas/QuoteNotificationDto' }, { $ref: '#/components/schemas/MentionNotificationDto' }, + { $ref: '#/components/schemas/MessageNotificationDto' }, ], }, example: [ { + id: '507f1f77bcf86cd799439011', type: 'like', created_at: '2025-11-29T10:30:00.000Z', likers: [ @@ -60,6 +64,7 @@ export class NotificationsResponseDto { }, }, { + id: '507f1f77bcf86cd799439012', type: 'follow', created_at: '2025-11-29T09:15:00.000Z', followers: [ @@ -73,6 +78,7 @@ export class NotificationsResponseDto { ], }, { + id: '507f1f77bcf86cd799439013', type: 'reply', created_at: '2025-11-29T08:45:00.000Z', replier: { @@ -119,6 +125,7 @@ export class NotificationsResponseDto { conversation_id: '623e4567-e89b-12d3-a456-426614174007', }, { + id: '507f1f77bcf86cd799439014', type: 'repost', created_at: '2025-11-29T08:00:00.000Z', reposters: [ @@ -149,6 +156,7 @@ export class NotificationsResponseDto { }, }, { + id: '507f1f77bcf86cd799439015', type: 'quote', created_at: '2025-11-29T07:30:00.000Z', quoter: { diff --git a/src/notifications/dto/quote-notification.dto.ts b/src/notifications/dto/quote-notification.dto.ts index 5a55ce03..7ee65769 100644 --- a/src/notifications/dto/quote-notification.dto.ts +++ b/src/notifications/dto/quote-notification.dto.ts @@ -4,6 +4,13 @@ import { User } from 'src/user/entities'; import { Tweet } from 'src/tweets/entities'; export class QuoteNotificationDto { + @ApiProperty({ + description: 'Notification ID', + example: '507f1f77bcf86cd799439011', + type: String, + }) + id: string; + @ApiProperty({ description: 'Notification type', example: NotificationType.QUOTE, diff --git a/src/notifications/dto/reply-notification.dto.ts b/src/notifications/dto/reply-notification.dto.ts index c2229aaa..787ca18f 100644 --- a/src/notifications/dto/reply-notification.dto.ts +++ b/src/notifications/dto/reply-notification.dto.ts @@ -4,6 +4,13 @@ import { User } from 'src/user/entities'; import { Tweet } from 'src/tweets/entities'; export class ReplyNotificationDto { + @ApiProperty({ + description: 'Notification ID', + example: '507f1f77bcf86cd799439011', + type: String, + }) + id: string; + @ApiProperty({ description: 'Notification type', example: NotificationType.REPLY, diff --git a/src/notifications/dto/repost-notification.dto.ts b/src/notifications/dto/repost-notification.dto.ts index 5ee19b35..34557ced 100644 --- a/src/notifications/dto/repost-notification.dto.ts +++ b/src/notifications/dto/repost-notification.dto.ts @@ -4,6 +4,13 @@ import { User } from 'src/user/entities'; import { Tweet } from 'src/tweets/entities'; export class RepostNotificationDto { + @ApiProperty({ + description: 'Notification ID', + example: '507f1f77bcf86cd799439011', + type: String, + }) + id: string; + @ApiProperty({ description: 'Notification type', example: NotificationType.REPOST, diff --git a/src/notifications/entities/base-notification.entity.ts b/src/notifications/entities/base-notification.entity.ts index 1b429703..f73dc364 100644 --- a/src/notifications/entities/base-notification.entity.ts +++ b/src/notifications/entities/base-notification.entity.ts @@ -1,8 +1,12 @@ import { Prop, Schema, SchemaFactory } from '@nestjs/mongoose'; import { NotificationType } from '../enums/notification-types'; +import { Types } from 'mongoose'; -@Schema({ _id: false, timestamps: false }) +@Schema({ timestamps: false }) export abstract class BaseNotificationEntity { + @Prop({ type: Types.ObjectId, auto: true }) + _id?: Types.ObjectId; + @Prop({ type: String, enum: NotificationType, required: true }) type: NotificationType; diff --git a/src/notifications/entities/notifications.entity.ts b/src/notifications/entities/notifications.entity.ts index df6ddef7..da783f98 100644 --- a/src/notifications/entities/notifications.entity.ts +++ b/src/notifications/entities/notifications.entity.ts @@ -1,5 +1,5 @@ import { Prop, Schema, SchemaFactory } from '@nestjs/mongoose'; -import { Document, Types } from 'mongoose'; +import { Document } from 'mongoose'; import { BaseNotificationEntity } from './base-notification.entity'; @Schema({ collection: 'notifications', timestamps: true }) diff --git a/src/notifications/notifications.controller.ts b/src/notifications/notifications.controller.ts index 10c102e2..cc4db509 100644 --- a/src/notifications/notifications.controller.ts +++ b/src/notifications/notifications.controller.ts @@ -17,6 +17,7 @@ import { ReplyNotificationDto } from './dto/reply-notification.dto'; import { RepostNotificationDto } from './dto/repost-notification.dto'; import { QuoteNotificationDto } from './dto/quote-notification.dto'; import { MentionNotificationDto } from './dto/mention-notification.dto'; +import { MessageNotificationDto } from './dto/message-notification.dto'; import { get_mentions_and_replies_swagger, get_user_notifications_swagger, @@ -35,7 +36,8 @@ import { ERROR_MESSAGES } from 'src/constants/swagger-messages'; ReplyNotificationDto, RepostNotificationDto, QuoteNotificationDto, - MentionNotificationDto + MentionNotificationDto, + MessageNotificationDto ) @Controller('notifications') export class NotificationsController { diff --git a/src/notifications/notifications.module.ts b/src/notifications/notifications.module.ts index 6abd01bb..6fe1b7ff 100644 --- a/src/notifications/notifications.module.ts +++ b/src/notifications/notifications.module.ts @@ -9,14 +9,16 @@ import { TypeOrmModule } from '@nestjs/typeorm'; import { User } from 'src/user/entities'; import { Tweet } from 'src/tweets/entities'; import { BackgroundJobsModule } from 'src/background-jobs'; -import { FcmModule } from 'src/fcm/fcm.module'; +import { FcmModule } from 'src/expo/expo.module'; import { MessagesModule } from 'src/messages/messages.module'; +import { TweetsModule } from 'src/tweets/tweets.module'; @Module({ imports: [ MongodbModule, MongooseModule.forFeature([{ name: Notification.name, schema: NotificationSchema }]), TypeOrmModule.forFeature([User, Tweet]), + forwardRef(() => TweetsModule), forwardRef(() => BackgroundJobsModule), forwardRef(() => FcmModule), forwardRef(() => MessagesModule), diff --git a/src/notifications/notifications.service.spec.ts b/src/notifications/notifications.service.spec.ts index 34f1f4d3..8046f1bc 100644 --- a/src/notifications/notifications.service.spec.ts +++ b/src/notifications/notifications.service.spec.ts @@ -1,93 +1,159 @@ import { Test, TestingModule } from '@nestjs/testing'; import { getModelToken } from '@nestjs/mongoose'; import { getRepositoryToken } from '@nestjs/typeorm'; -import { Model } from 'mongoose'; +import { Model, Types } from 'mongoose'; import { NotificationsService } from './notifications.service'; import { Notification } from './entities/notifications.entity'; import { NotificationsGateway } from './notifications.gateway'; import { User } from '../user/entities/user.entity'; import { Tweet } from '../tweets/entities/tweet.entity'; import { ClearJobService } from '../background-jobs/notifications/clear/clear.service'; -import { FCMService } from '../fcm/fcm.service'; +import { FCMService } from '../expo/expo.service'; import { MessagesGateway } from '../messages/messages.gateway'; +import { NotificationType } from './enums/notification-types'; +import { FollowNotificationEntity } from './entities/follow-notification.entity'; +import { LikeNotificationEntity } from './entities/like-notification.entity'; +import { RepostNotificationEntity } from './entities/repost-notification.entity'; +import { ReplyNotificationEntity } from './entities/reply-notification.entity'; +import { MentionNotificationEntity } from './entities/mention-notification.entity'; +import { QuoteNotificationEntity } from './entities/quote-notification.entity'; describe('NotificationsService', () => { let service: NotificationsService; - let notification_model: jest.Mocked>; + let notification_model: any; + let notifications_gateway: any; + let user_repository: any; + let tweet_repository: any; + let clear_job_service: any; + let fcm_service: any; + let messages_gateway: any; + + const mock_user = { + id: 'user-123', + username: 'testuser', + name: 'Test User', + email: 'test@example.com', + avatar_url: 'https://example.com/avatar.jpg', + }; + + const mock_tweet = { + tweet_id: 'tweet-123', + content: 'Test tweet content', + user: mock_user, + user_id: 'user-123', + }; const mock_notification = { user: 'user-123', notifications: [ { - type: 'follow', - follower_id: 'user-456', - follower_name: 'John Doe', + _id: new Types.ObjectId(), + type: NotificationType.FOLLOW, + follower_id: ['user-456'], created_at: new Date(), - seen: false, }, ], + newest_count: 1, }; beforeEach(async () => { + const mock_query_builder = { + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([]), + createQueryBuilder: jest.fn().mockReturnThis(), + }; + + notification_model = { + updateOne: jest.fn().mockResolvedValue({ acknowledged: true }), + findOne: jest.fn().mockReturnValue({ + lean: jest.fn().mockReturnValue({ + exec: jest.fn().mockResolvedValue(mock_notification), + }), + }), + findOneAndUpdate: jest.fn().mockResolvedValue(mock_notification), + find: jest.fn(), + create: jest.fn(), + save: jest.fn(), + }; + + notifications_gateway = { + setNotificationsService: jest.fn(), + sendToUser: jest.fn(), + }; + + user_repository = { + findOne: jest.fn().mockResolvedValue(mock_user), + find: jest.fn().mockResolvedValue([mock_user]), + save: jest.fn(), + createQueryBuilder: jest.fn().mockReturnValue(mock_query_builder), + metadata: { + columns: [ + { propertyName: 'id' }, + { propertyName: 'username' }, + { propertyName: 'name' }, + { propertyName: 'email' }, + { propertyName: 'avatar_url' }, + ], + }, + }; + + tweet_repository = { + findOne: jest.fn().mockResolvedValue(mock_tweet), + find: jest.fn().mockResolvedValue([mock_tweet]), + save: jest.fn(), + createQueryBuilder: jest.fn().mockReturnValue(mock_query_builder), + }; + + clear_job_service = { + queueClearNotification: jest.fn().mockResolvedValue({ success: true }), + queueClearNotificationByUsers: jest.fn().mockResolvedValue({ success: true }), + }; + + fcm_service = { + sendNotificationToUserDevice: jest.fn().mockResolvedValue(true), + }; + + messages_gateway = { + isOnline: jest.fn().mockReturnValue(false), + }; + const module: TestingModule = await Test.createTestingModule({ providers: [ NotificationsService, { provide: getModelToken(Notification.name), - useValue: { - updateOne: jest.fn(), - findOne: jest.fn(), - find: jest.fn(), - create: jest.fn(), - save: jest.fn(), - }, + useValue: notification_model, }, { provide: NotificationsGateway, - useValue: { - setNotificationsService: jest.fn(), - sendNotificationToUser: jest.fn(), - }, + useValue: notifications_gateway, }, { provide: getRepositoryToken(User), - useValue: { - findOne: jest.fn(), - save: jest.fn(), - }, + useValue: user_repository, }, { provide: getRepositoryToken(Tweet), - useValue: { - findOne: jest.fn(), - save: jest.fn(), - }, + useValue: tweet_repository, }, { provide: ClearJobService, - useValue: { - queueClearNotification: jest.fn(), - }, + useValue: clear_job_service, }, { provide: FCMService, - useValue: { - sendNotificationToUserDevice: jest.fn(), - addUserDeviceToken: jest.fn(), - removeUserDeviceToken: jest.fn(), - }, + useValue: fcm_service, }, { provide: MessagesGateway, - useValue: { - sendMessageNotificationToUser: jest.fn(), - }, + useValue: messages_gateway, }, ], }).compile(); service = module.get(NotificationsService); - notification_model = module.get(getModelToken(Notification.name)); }); afterEach(() => { @@ -97,4 +163,2145 @@ describe('NotificationsService', () => { it('should be defined', () => { expect(service).toBeDefined(); }); + + describe('onModuleInit', () => { + it('should set notifications service on gateway', () => { + service.onModuleInit(); + expect(notifications_gateway.setNotificationsService).toHaveBeenCalledWith(service); + }); + }); + + describe('saveNotificationAndSend', () => { + it('should save and send a FOLLOW notification when not aggregated', async () => { + const notification_data: FollowNotificationEntity = { + type: NotificationType.FOLLOW, + follower_id: ['user-456'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + follower_id: 'user-456', + follower_username: 'follower', + follower_name: 'Follower User', + follower_avatar_url: 'https://example.com/follower.jpg', + }; + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notification_model.updateOne).toHaveBeenCalledWith( + { user: 'user-123' }, + expect.objectContaining({ + $push: expect.any(Object), + $inc: { newest_count: 1 }, + }), + { upsert: true } + ); + expect(fcm_service.sendNotificationToUserDevice).toHaveBeenCalled(); + }); + + it('should handle REPLY notification with blocked user', async () => { + const notification_data: ReplyNotificationEntity = { + type: NotificationType.REPLY, + replied_by: 'user-blocked', + reply_tweet_id: 'tweet-reply', + original_tweet_id: 'tweet-original', + conversation_id: 'conv-123', + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + replier: { id: 'user-blocked' }, + }; + + const blocked_user = { + ...mock_user, + id: 'user-blocked', + relation_blocked: true, + }; + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([blocked_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_tweet]), + }); + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + messages_gateway.isOnline.mockReturnValue(true); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notification_model.updateOne).toHaveBeenCalled(); + expect(notifications_gateway.sendToUser).not.toHaveBeenCalled(); + expect(fcm_service.sendNotificationToUserDevice).not.toHaveBeenCalled(); + }); + + it('should aggregate FOLLOW notification when recent one exists', async () => { + const notification_data: FollowNotificationEntity = { + type: NotificationType.FOLLOW, + follower_id: ['user-789'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + follower_id: 'user-789', + }; + + const updated_notification = { + ...notification_data, + follower_id: ['user-456', 'user-789'], + }; + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: true, + updated_notification, + old_notification: notification_data, + }); + + jest.spyOn(service as any, 'fetchNotificationWithData').mockResolvedValue({ + type: NotificationType.FOLLOW, + followers: [mock_user], + }); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notification_model.updateOne).toHaveBeenCalledWith( + { user: 'user-123' }, + { $inc: { newest_count: 1 } } + ); + }); + + it('should handle MENTION notification', async () => { + const notification_data: MentionNotificationEntity = { + type: NotificationType.MENTION, + mentioned_by: 'user-456', + tweet_id: 'tweet-123', + tweet_type: 'normal', + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + mentioner: { id: 'user-456' }, + tweet: mock_tweet, + }; + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_tweet]), + }); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notification_model.updateOne).toHaveBeenCalled(); + }); + + it('should handle QUOTE notification', async () => { + const notification_data: QuoteNotificationEntity = { + type: NotificationType.QUOTE, + quoted_by: 'user-456', + quote_tweet_id: 'tweet-quote', + parent_tweet_id: 'tweet-parent', + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + quoter: { id: 'user-456' }, + }; + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest + .fn() + .mockResolvedValue([mock_tweet, { ...mock_tweet, tweet_id: 'tweet-parent' }]), + }); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notification_model.updateOne).toHaveBeenCalled(); + }); + + it('should handle LIKE notification', async () => { + const notification_data: LikeNotificationEntity = { + type: NotificationType.LIKE, + liked_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + liker: mock_user, + tweet: mock_tweet, + }; + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notification_model.updateOne).toHaveBeenCalled(); + }); + + it('should handle REPOST notification', async () => { + const notification_data: RepostNotificationEntity = { + type: NotificationType.REPOST, + reposted_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + reposter: mock_user, + tweet: mock_tweet, + }; + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notification_model.updateOne).toHaveBeenCalled(); + }); + }); + + describe('sendNotificationOnly', () => { + it('should send notification through gateway', async () => { + const payload = { test: 'data' }; + + await service.sendNotificationOnly(NotificationType.FOLLOW, 'user-123', payload); + + expect(notifications_gateway.sendToUser).toHaveBeenCalledWith( + NotificationType.FOLLOW, + 'user-123', + payload + ); + }); + }); + + describe('clearNewestCount', () => { + it('should clear newest count for user', async () => { + await service.clearNewestCount('user-123'); + + expect(notification_model.updateOne).toHaveBeenCalledWith( + { user: 'user-123' }, + { $set: { newest_count: 0 } } + ); + }); + + it('should handle errors when clearing newest count', async () => { + const error = new Error('Database error'); + notification_model.updateOne.mockRejectedValue(error); + + await expect(service.clearNewestCount('user-123')).rejects.toThrow('Database error'); + }); + }); + + describe('getNewestCount', () => { + it('should return newest count for user', async () => { + notification_model.findOne.mockReturnValue({ + select: jest.fn().mockReturnValue({ + lean: jest.fn().mockResolvedValue({ newest_count: 5 }), + }), + }); + + const result = await service.getNewestCount('user-123'); + + expect(notification_model.findOne).toHaveBeenCalled(); + expect(result).toBe(5); + }); + + it('should return 0 when no notifications exist', async () => { + notification_model.findOne.mockReturnValue({ + select: jest.fn().mockReturnValue({ + lean: jest.fn().mockResolvedValue(null), + }), + }); + + const result = await service.getNewestCount('user-123'); + + expect(result).toBe(0); + }); + + it('should handle errors when getting newest count', async () => { + notification_model.findOne.mockReturnValue({ + select: jest.fn().mockReturnValue({ + lean: jest.fn().mockRejectedValue(new Error('Database error')), + }), + }); + + await expect(service.getNewestCount('user-123')).rejects.toThrow('Database error'); + }); + }); + + describe('deleteNotificationsByTweetIds', () => { + it('should delete notifications by tweet IDs', async () => { + await service.deleteNotificationsByTweetIds('user-123', ['tweet-1', 'tweet-2']); + + expect(notification_model.updateOne).toHaveBeenCalledTimes(2); + }); + + it('should handle errors when deleting notifications', async () => { + const error = new Error('Delete error'); + notification_model.updateOne.mockRejectedValue(error); + + await expect( + service.deleteNotificationsByTweetIds('user-123', ['tweet-1']) + ).rejects.toThrow('Delete error'); + }); + }); + + describe('cleanupNotificationsByUserIds', () => { + it('should cleanup notifications by user IDs', async () => { + await service.cleanupNotificationsByUserIds('user-123', ['user-456', 'user-789']); + + expect(notification_model.updateOne).toHaveBeenCalled(); + }); + }); + + describe('removeFollowNotification', () => { + it('should remove follow notification and return notification ID', async () => { + const notification_id = new Types.ObjectId(); + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: notification_id, + type: NotificationType.FOLLOW, + follower_id: ['user-456'], + created_at: new Date(), + }, + ], + }), + }); + notification_model.updateOne.mockResolvedValue({ modifiedCount: 1 }); + + const result = await service.removeFollowNotification('user-123', 'user-456'); + + expect(notification_model.updateOne).toHaveBeenCalled(); + expect(result).toBe(notification_id.toString()); + }); + + it('should return null when notification not found', async () => { + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue(null), + }); + + const result = await service.removeFollowNotification('user-123', 'user-456'); + + expect(result).toBeNull(); + }); + }); + + describe('removeLikeNotification', () => { + it('should remove like notification', async () => { + const notification_id = new Types.ObjectId(); + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: notification_id, + type: NotificationType.LIKE, + liked_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }); + notification_model.updateOne.mockResolvedValue({ modifiedCount: 1 }); + + const result = await service.removeLikeNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBe(notification_id.toString()); + }); + }); + + describe('removeRepostNotification', () => { + it('should remove repost notification', async () => { + const notification_id = new Types.ObjectId(); + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: notification_id, + type: NotificationType.REPOST, + reposted_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }); + notification_model.updateOne.mockResolvedValue({ modifiedCount: 1 }); + + const result = await service.removeRepostNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBe(notification_id.toString()); + }); + }); + + describe('removeReplyNotification', () => { + it('should remove reply notification', async () => { + const notification_id = new Types.ObjectId(); + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: notification_id, + type: NotificationType.REPLY, + replied_by: 'user-456', + reply_tweet_id: 'tweet-123', + original_tweet_id: 'tweet-original', + created_at: new Date(), + }, + ], + }), + }); + notification_model.updateOne.mockResolvedValue({ modifiedCount: 1 }); + + const result = await service.removeReplyNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBe(notification_id.toString()); + }); + }); + + describe('removeQuoteNotification', () => { + it('should remove quote notification', async () => { + const notification_id = new Types.ObjectId(); + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: notification_id, + type: NotificationType.QUOTE, + quoted_by: 'user-456', + quote_tweet_id: 'tweet-123', + parent_tweet_id: 'tweet-parent', + created_at: new Date(), + }, + ], + }), + }); + notification_model.updateOne.mockResolvedValue({ modifiedCount: 1 }); + + const result = await service.removeQuoteNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBe(notification_id.toString()); + }); + }); + + describe('removeMentionNotification', () => { + it('should remove mention notification', async () => { + const notification_id = new Types.ObjectId(); + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: notification_id, + type: NotificationType.MENTION, + mentioned_by: 'user-456', + tweet_id: 'tweet-123', + tweet_type: 'normal', + created_at: new Date(), + }, + ], + }), + }); + notification_model.updateOne.mockResolvedValue({ modifiedCount: 1 }); + + const result = await service.removeMentionNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBe(notification_id.toString()); + }); + }); + + describe('getUserNotifications', () => { + it('should return paginated notifications', async () => { + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_tweet]), + }); + + const result = await service.getUserNotifications('user-123', 1); + + expect(result).toHaveProperty('notifications'); + expect(result).toHaveProperty('page'); + expect(result).toHaveProperty('total'); + }); + + it('should return empty result when no notifications exist', async () => { + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockReturnValue({ + exec: jest.fn().mockResolvedValue(null), + }), + }); + + const result = await service.getUserNotifications('user-123', 1); + + expect(result.notifications).toEqual([]); + expect(result.total).toBe(0); + }); + + it('should filter blocked users from notifications', async () => { + const blocked_user = { + ...mock_user, + id: 'blocked-user', + relation_blocked: true, + }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockReturnValue({ + exec: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.REPLY, + replied_by: 'blocked-user', + reply_tweet_id: 'tweet-123', + original_tweet_id: 'tweet-456', + conversation_id: 'conv-123', + created_at: new Date(), + }, + ], + }), + }), + }); + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([blocked_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest + .fn() + .mockResolvedValue([mock_tweet, { ...mock_tweet, tweet_id: 'tweet-456' }]), + }); + + const result = await service.getUserNotifications('user-123', 1); + + expect(result.notifications).toHaveLength(0); + }); + }); + + describe('getMentionsAndReplies', () => { + it('should return only mention and reply notifications', async () => { + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockReturnValue({ + exec: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.MENTION, + mentioned_by: 'user-456', + tweet_id: 'tweet-123', + tweet_type: 'normal', + created_at: new Date(), + }, + { + _id: new Types.ObjectId(), + type: NotificationType.REPLY, + replied_by: 'user-789', + reply_tweet_id: 'tweet-456', + original_tweet_id: 'tweet-123', + conversation_id: 'conv-123', + created_at: new Date(), + }, + ], + }), + }), + }); + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([ + { ...mock_user, id: 'user-456' }, + { ...mock_user, id: 'user-789' }, + ]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest + .fn() + .mockResolvedValue([mock_tweet, { ...mock_tweet, tweet_id: 'tweet-456' }]), + }); + + const result = await service.getMentionsAndReplies('user-123', 1); + + expect(result.notifications).toHaveLength(2); + }); + + it('should filter blocked users from mentions and replies', async () => { + const blocked_user = { + ...mock_user, + id: 'blocked-user', + relation_blocked: true, + }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockReturnValue({ + exec: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.MENTION, + mentioned_by: 'blocked-user', + tweet_id: 'tweet-123', + tweet_type: 'normal', + created_at: new Date(), + }, + ], + }), + }), + }); + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([blocked_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_tweet]), + }); + + const result = await service.getMentionsAndReplies('user-123', 1); + + expect(result.notifications).toHaveLength(0); + }); + + it('should return empty result when no mention/reply notifications exist', async () => { + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockReturnValue({ + exec: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.FOLLOW, + follower_id: ['user-456'], + created_at: new Date(), + }, + ], + }), + }), + }); + + const result = await service.getMentionsAndReplies('user-123', 1); + + expect(result.notifications).toEqual([]); + expect(result.total).toBe(0); + }); + }); + + describe('tryAggregateNotification', () => { + it('should aggregate LIKE notification by tweet (same tweet, different person)', async () => { + const notification_data: LikeNotificationEntity = { + type: NotificationType.LIKE, + liked_by: ['user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.LIKE, + liked_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }); + + notification_model.findOneAndUpdate.mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.LIKE, + liked_by: ['user-456', 'user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }); + + const result = await (service as any).tryAggregateNotification( + 'user-123', + notification_data + ); + + expect(result.aggregated).toBe(true); + expect(notification_model.findOneAndUpdate).toHaveBeenCalled(); + }); + + it('should aggregate REPOST notification by tweet', async () => { + const notification_data: RepostNotificationEntity = { + type: NotificationType.REPOST, + reposted_by: ['user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.REPOST, + reposted_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }); + + notification_model.findOneAndUpdate.mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.REPOST, + reposted_by: ['user-456', 'user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }); + + const result = await (service as any).tryAggregateNotification( + 'user-123', + notification_data + ); + + expect(result.aggregated).toBe(true); + }); + + it('should not aggregate for non-aggregatable notification types', async () => { + const notification_data: ReplyNotificationEntity = { + type: NotificationType.REPLY, + replied_by: 'user-456', + reply_tweet_id: 'tweet-reply', + original_tweet_id: 'tweet-original', + conversation_id: 'conv-123', + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const result = await (service as any).tryAggregateNotification( + 'user-123', + notification_data + ); + + expect(result.aggregated).toBe(false); + }); + + it('should not aggregate when no existing notification found', async () => { + const notification_data: LikeNotificationEntity = { + type: NotificationType.LIKE, + liked_by: ['user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue(null), + }); + + const result = await (service as any).tryAggregateNotification( + 'user-123', + notification_data + ); + + expect(result.aggregated).toBe(false); + }); + }); + + describe('normalizeNotificationData', () => { + it('should normalize FOLLOW notification follower_id to array', () => { + const notification_data = { + type: NotificationType.FOLLOW, + follower_id: 'user-456', + created_at: new Date(), + } as any; + + (service as any).normalizeNotificationData(notification_data); + + expect(Array.isArray(notification_data.follower_id)).toBe(true); + expect(notification_data.follower_id).toEqual(['user-456']); + }); + + it('should normalize LIKE notification fields to arrays', () => { + const notification_data = { + type: NotificationType.LIKE, + liked_by: 'user-456', + tweet_id: 'tweet-123', + created_at: new Date(), + } as any; + + (service as any).normalizeNotificationData(notification_data); + + expect(Array.isArray(notification_data.liked_by)).toBe(true); + expect(Array.isArray(notification_data.tweet_id)).toBe(true); + }); + + it('should normalize REPOST notification fields to arrays', () => { + const notification_data = { + type: NotificationType.REPOST, + reposted_by: 'user-456', + tweet_id: 'tweet-123', + created_at: new Date(), + } as any; + + (service as any).normalizeNotificationData(notification_data); + + expect(Array.isArray(notification_data.reposted_by)).toBe(true); + expect(Array.isArray(notification_data.tweet_id)).toBe(true); + }); + }); + + describe('enrichUserWithStatus', () => { + it('should add relationship status to user', () => { + const user = { + ...mock_user, + relation_following: true, + relation_follower: false, + relation_blocked: false, + relation_muted: true, + }; + + const result = (service as any).enrichUserWithStatus(user); + + expect(result.is_following).toBe(true); + expect(result.is_follower).toBe(false); + expect(result.is_blocked).toBe(false); + expect(result.is_muted).toBe(true); + }); + }); + + describe('enrichTweetWithStatus', () => { + it('should add interaction status to tweet', () => { + const tweet = { + ...mock_tweet, + current_user_like: true, + current_user_repost: false, + current_user_bookmark: true, + }; + + const result = (service as any).enrichTweetWithStatus(tweet); + + expect(result.is_liked).toBe(true); + expect(result.is_reposted).toBe(false); + expect(result.is_bookmarked).toBe(true); + }); + }); + + describe('cleanUser', () => { + it('should remove relationship status from user', () => { + const user = { + ...mock_user, + is_following: true, + is_follower: true, + }; + + const result = (service as any).cleanUser(user); + + expect(result.is_following).toBeUndefined(); + expect(result.is_follower).toBeUndefined(); + }); + }); + + describe('cleanTweet', () => { + it('should remove interaction status from tweet', () => { + const tweet = { + ...mock_tweet, + is_liked: true, + is_reposted: true, + }; + + const result = (service as any).cleanTweet(tweet); + + expect(result.is_liked).toBeUndefined(); + expect(result.is_reposted).toBeUndefined(); + }); + }); + + describe('getUserNotifications with LIKE notifications', () => { + it('should return LIKE notifications with user data', async () => { + const liker = { ...mock_user, id: 'user-liker' }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockReturnValue({ + exec: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.LIKE, + liked_by: ['user-liker'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }), + }); + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([liker]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_tweet]), + }); + + const result = await service.getUserNotifications('user-123', 1); + + expect(result.notifications).toHaveLength(1); + expect(result.notifications[0].type).toBe(NotificationType.LIKE); + }); + }); + + describe('getUserNotifications with REPOST notifications', () => { + it('should return REPOST notifications with user data', async () => { + const reposter = { ...mock_user, id: 'user-reposter' }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockReturnValue({ + exec: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.REPOST, + reposted_by: ['user-reposter'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }), + }); + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([reposter]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_tweet]), + }); + + const result = await service.getUserNotifications('user-123', 1); + + expect(result.notifications).toHaveLength(1); + expect(result.notifications[0].type).toBe(NotificationType.REPOST); + }); + }); + + describe('getUserNotifications with QUOTE notifications', () => { + it('should return QUOTE notifications', async () => { + const quoter = { ...mock_user, id: 'user-quoter' }; + const quote_tweet = { ...mock_tweet, tweet_id: 'tweet-quote' }; + const parent_tweet = { ...mock_tweet, tweet_id: 'tweet-parent' }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockReturnValue({ + exec: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.QUOTE, + quoted_by: 'user-quoter', + quote_tweet_id: 'tweet-quote', + parent_tweet_id: 'tweet-parent', + created_at: new Date(), + }, + ], + }), + }), + }); + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([quoter]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([quote_tweet, parent_tweet]), + }); + + const result = await service.getUserNotifications('user-123', 1); + + expect(result.notifications).toHaveLength(1); + expect(result.notifications[0].type).toBe(NotificationType.QUOTE); + }); + + it('should filter blocked quoters', async () => { + const blocked_quoter = { ...mock_user, id: 'user-quoter', relation_blocked: true }; + const quote_tweet = { ...mock_tweet, tweet_id: 'tweet-quote' }; + const parent_tweet = { ...mock_tweet, tweet_id: 'tweet-parent' }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockReturnValue({ + exec: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.QUOTE, + quoted_by: 'user-quoter', + quote_tweet_id: 'tweet-quote', + parent_tweet_id: 'tweet-parent', + created_at: new Date(), + }, + ], + }), + }), + }); + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([blocked_quoter]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([quote_tweet, parent_tweet]), + }); + + const result = await service.getUserNotifications('user-123', 1); + + expect(result.notifications).toHaveLength(0); + }); + }); + + describe('remove notifications - not found cases', () => { + it('should return null when like notification is not found', async () => { + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [], + }), + }); + + const result = await service.removeLikeNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBeNull(); + }); + + it('should return null when repost notification is not found', async () => { + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [], + }), + }); + + const result = await service.removeRepostNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBeNull(); + }); + + it('should return null when reply notification is not found', async () => { + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [], + }), + }); + + const result = await service.removeReplyNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBeNull(); + }); + + it('should return null when quote notification is not found', async () => { + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [], + }), + }); + + const result = await service.removeQuoteNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBeNull(); + }); + + it('should return null when mention notification is not found', async () => { + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [], + }), + }); + + const result = await service.removeMentionNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBeNull(); + }); + }); + + describe('tryAggregateNotification - FOLLOW', () => { + it('should aggregate FOLLOW notification when recent one exists', async () => { + const notification_data: FollowNotificationEntity = { + type: NotificationType.FOLLOW, + follower_id: ['user-789'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.FOLLOW, + follower_id: ['user-456'], + created_at: new Date(), + }, + ], + }), + }); + + notification_model.findOneAndUpdate.mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.FOLLOW, + follower_id: ['user-456', 'user-789'], + created_at: new Date(), + }, + ], + }); + + const result = await (service as any).tryAggregateNotification( + 'user-123', + notification_data + ); + + expect(result.aggregated).toBe(true); + expect(notification_model.findOneAndUpdate).toHaveBeenCalled(); + }); + + it('should not aggregate FOLLOW when no recent notification exists', async () => { + const notification_data: FollowNotificationEntity = { + type: NotificationType.FOLLOW, + follower_id: ['user-789'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const old_date = new Date(); + old_date.setDate(old_date.getDate() - 2); // More than 1 day ago + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.FOLLOW, + follower_id: ['user-456'], + created_at: old_date, + }, + ], + }), + }); + + const result = await (service as any).tryAggregateNotification( + 'user-123', + notification_data + ); + + expect(result.aggregated).toBe(false); + }); + + it('should not aggregate FOLLOW when findOneAndUpdate returns null', async () => { + const notification_data: FollowNotificationEntity = { + type: NotificationType.FOLLOW, + follower_id: ['user-789'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.FOLLOW, + follower_id: ['user-456'], + created_at: new Date(), + }, + ], + }), + }); + + notification_model.findOneAndUpdate.mockResolvedValue(null); + + const result = await (service as any).tryAggregateNotification( + 'user-123', + notification_data + ); + + expect(result.aggregated).toBe(false); + }); + }); + + describe('tryAggregateNotification - LIKE by person', () => { + it('should aggregate LIKE notification by person (same person, different tweets)', async () => { + const notification_data: LikeNotificationEntity = { + type: NotificationType.LIKE, + liked_by: ['user-456'], + tweet_id: ['tweet-999'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.LIKE, + liked_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }); + + notification_model.findOneAndUpdate.mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.LIKE, + liked_by: ['user-456'], + tweet_id: ['tweet-123', 'tweet-999'], + created_at: new Date(), + }, + ], + }); + + const result = await (service as any).tryAggregateNotification( + 'user-123', + notification_data + ); + + expect(result.aggregated).toBe(true); + }); + }); + + describe('tryAggregateNotification - REPOST by person', () => { + it('should aggregate REPOST notification by person (same person, different tweets)', async () => { + const notification_data: RepostNotificationEntity = { + type: NotificationType.REPOST, + reposted_by: ['user-456'], + tweet_id: ['tweet-999'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.REPOST, + reposted_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }); + + notification_model.findOneAndUpdate.mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.REPOST, + reposted_by: ['user-456'], + tweet_id: ['tweet-123', 'tweet-999'], + created_at: new Date(), + }, + ], + }); + + const result = await (service as any).tryAggregateNotification( + 'user-123', + notification_data + ); + + expect(result.aggregated).toBe(true); + }); + + it('should not aggregate REPOST when findOneAndUpdate returns null', async () => { + const notification_data: RepostNotificationEntity = { + type: NotificationType.REPOST, + reposted_by: ['user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: new Types.ObjectId(), + type: NotificationType.REPOST, + reposted_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }); + + notification_model.findOneAndUpdate.mockResolvedValue(null); + + const result = await (service as any).tryAggregateNotification( + 'user-123', + notification_data + ); + + expect(result.aggregated).toBe(false); + }); + }); + + describe('removeLikeNotification - aggregated cases', () => { + it('should remove like from aggregated notification by tweet', async () => { + const notification_id = new Types.ObjectId(); + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: notification_id, + type: NotificationType.LIKE, + liked_by: ['user-456', 'user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }); + notification_model.updateOne.mockResolvedValue({ modifiedCount: 1 }); + + const result = await service.removeLikeNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBe(notification_id.toString()); + }); + + it('should remove like from aggregated notification by person', async () => { + const notification_id = new Types.ObjectId(); + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: notification_id, + type: NotificationType.LIKE, + liked_by: ['user-456'], + tweet_id: ['tweet-123', 'tweet-456'], + created_at: new Date(), + }, + ], + }), + }); + notification_model.updateOne.mockResolvedValue({ modifiedCount: 1 }); + + const result = await service.removeLikeNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBe(notification_id.toString()); + }); + }); + + describe('removeRepostNotification - aggregated cases', () => { + it('should remove repost from aggregated notification by tweet', async () => { + const notification_id = new Types.ObjectId(); + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: notification_id, + type: NotificationType.REPOST, + reposted_by: ['user-456', 'user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + ], + }), + }); + notification_model.updateOne.mockResolvedValue({ modifiedCount: 1 }); + + const result = await service.removeRepostNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBe(notification_id.toString()); + }); + + it('should remove repost from aggregated notification by person', async () => { + const notification_id = new Types.ObjectId(); + notification_model.findOne.mockReturnValue({ + lean: jest.fn().mockResolvedValue({ + notifications: [ + { + _id: notification_id, + type: NotificationType.REPOST, + reposted_by: ['user-456'], + tweet_id: ['tweet-123', 'tweet-456'], + created_at: new Date(), + }, + ], + }), + }); + notification_model.updateOne.mockResolvedValue({ modifiedCount: 1 }); + + const result = await service.removeRepostNotification( + 'user-123', + 'tweet-123', + 'user-456' + ); + + expect(result).toBe(notification_id.toString()); + }); + }); + + describe('saveNotificationAndSend - aggregated notifications', () => { + it('should handle aggregated LIKE notification and send via socket', async () => { + const notification_data: LikeNotificationEntity = { + type: NotificationType.LIKE, + liked_by: ['user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { liker: mock_user }; + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: true, + updated_notification: { + _id: new Types.ObjectId(), + type: NotificationType.LIKE, + liked_by: ['user-456', 'user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + old_notification: { + id: new Types.ObjectId().toString(), + type: NotificationType.LIKE, + liked_by: ['user-456'], + tweet_id: ['tweet-123'], + }, + }); + + jest.spyOn(service as any, 'fetchNotificationWithData').mockResolvedValue({ + type: NotificationType.LIKE, + liked_by: [mock_user], + tweets: [mock_tweet], + }); + + messages_gateway.isOnline.mockReturnValue(true); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notifications_gateway.sendToUser).toHaveBeenCalledWith( + NotificationType.LIKE, + 'user-123', + expect.objectContaining({ + action: 'aggregate', + }) + ); + }); + + it('should handle aggregated REPOST notification and send via FCM', async () => { + const notification_data: RepostNotificationEntity = { + type: NotificationType.REPOST, + reposted_by: ['user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { reposter: mock_user }; + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: true, + updated_notification: { + _id: new Types.ObjectId(), + type: NotificationType.REPOST, + reposted_by: ['user-456', 'user-789'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }, + old_notification: { + id: new Types.ObjectId().toString(), + type: NotificationType.REPOST, + }, + }); + + jest.spyOn(service as any, 'fetchNotificationWithData').mockResolvedValue({ + type: NotificationType.REPOST, + reposted_by: [mock_user], + tweets: [mock_tweet], + }); + + messages_gateway.isOnline.mockReturnValue(false); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(fcm_service.sendNotificationToUserDevice).toHaveBeenCalledWith( + 'user-123', + NotificationType.REPOST, + expect.objectContaining({ + action: 'aggregate', + }) + ); + }); + }); + + describe('getTweetsWithInteractions', () => { + it('should return empty array for empty tweet_ids', async () => { + const result = await (service as any).getTweetsWithInteractions([], 'user-123', true); + expect(result).toEqual([]); + }); + + it('should fetch tweets with interactions when flag is true', async () => { + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_tweet]), + }); + + const result = await (service as any).getTweetsWithInteractions( + ['tweet-123'], + 'user-123', + true + ); + + expect(result).toHaveLength(1); + expect(tweet_repository.createQueryBuilder).toHaveBeenCalled(); + }); + + it('should fetch tweets without interactions when flag is false', async () => { + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_tweet]), + }); + + const result = await (service as any).getTweetsWithInteractions( + ['tweet-123'], + 'user-123', + false + ); + + expect(result).toHaveLength(1); + }); + }); + + describe('getUsersWithRelationships', () => { + it('should return empty array for empty user_ids', async () => { + const result = await (service as any).getUsersWithRelationships([], 'user-123', true); + expect(result).toEqual([]); + }); + + it('should fetch users with relationships when flag is true', async () => { + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_user]), + }); + + const result = await (service as any).getUsersWithRelationships( + ['user-123'], + 'user-456', + true + ); + + expect(result).toHaveLength(1); + }); + + it('should fetch users without relationships when flag is false', async () => { + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_user]), + }); + + const result = await (service as any).getUsersWithRelationships( + ['user-123'], + 'user-456', + false + ); + + expect(result).toHaveLength(1); + }); + }); + + describe('saveNotificationAndSend - REPLY notification flow', () => { + it('should send REPLY notification via socket when user is online and not blocked', async () => { + const notification_data: ReplyNotificationEntity = { + type: NotificationType.REPLY, + replied_by: 'user-456', + reply_tweet_id: 'tweet-reply', + original_tweet_id: 'tweet-original', + conversation_id: 'conv-123', + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + replier: { id: 'user-456' }, + }; + + const replier_user = { + ...mock_user, + id: 'user-456', + relation_blocked: undefined, + }; + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([replier_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([ + { ...mock_tweet, tweet_id: 'tweet-reply' }, + { ...mock_tweet, tweet_id: 'tweet-original' }, + ]), + }); + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + messages_gateway.isOnline.mockReturnValue(true); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notifications_gateway.sendToUser).toHaveBeenCalledWith( + NotificationType.REPLY, + 'user-123', + expect.objectContaining({ + action: 'add', + }) + ); + }); + + it('should send REPLY notification via FCM when user is offline and not blocked', async () => { + const notification_data: ReplyNotificationEntity = { + type: NotificationType.REPLY, + replied_by: 'user-456', + reply_tweet_id: 'tweet-reply', + original_tweet_id: 'tweet-original', + conversation_id: 'conv-123', + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + replier: { id: 'user-456' }, + }; + + const replier_user = { + ...mock_user, + id: 'user-456', + relation_blocked: undefined, + }; + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([replier_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([ + { ...mock_tweet, tweet_id: 'tweet-reply' }, + { ...mock_tweet, tweet_id: 'tweet-original' }, + ]), + }); + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + messages_gateway.isOnline.mockReturnValue(false); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(fcm_service.sendNotificationToUserDevice).toHaveBeenCalled(); + }); + }); + + describe('saveNotificationAndSend - MENTION notification flow', () => { + it('should send MENTION notification when not blocked', async () => { + const notification_data: MentionNotificationEntity = { + type: NotificationType.MENTION, + mentioned_by: 'user-456', + tweet_id: 'tweet-123', + tweet_type: 'normal', + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + mentioner: { id: 'user-456' }, + }; + + const mentioner_user = { + ...mock_user, + id: 'user-456', + relation_blocked: undefined, + }; + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mentioner_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_tweet]), + }); + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + messages_gateway.isOnline.mockReturnValue(true); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notifications_gateway.sendToUser).toHaveBeenCalledWith( + NotificationType.MENTION, + 'user-123', + expect.objectContaining({ + action: 'add', + }) + ); + }); + + it('should not send MENTION notification when user is blocked', async () => { + const notification_data: MentionNotificationEntity = { + type: NotificationType.MENTION, + mentioned_by: 'user-blocked', + tweet_id: 'tweet-123', + tweet_type: 'normal', + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + mentioner: { id: 'user-blocked' }, + }; + + const blocked_user = { + ...mock_user, + id: 'user-blocked', + relation_blocked: true, + }; + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([blocked_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([mock_tweet]), + }); + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + messages_gateway.isOnline.mockReturnValue(true); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notifications_gateway.sendToUser).not.toHaveBeenCalled(); + expect(fcm_service.sendNotificationToUserDevice).not.toHaveBeenCalled(); + }); + }); + + describe('saveNotificationAndSend - QUOTE notification flow', () => { + it('should send QUOTE notification when not blocked', async () => { + const notification_data: QuoteNotificationEntity = { + type: NotificationType.QUOTE, + quoted_by: 'user-456', + quote_tweet_id: 'tweet-quote', + parent_tweet_id: 'tweet-parent', + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + quoter: { id: 'user-456' }, + }; + + const quoter_user = { + ...mock_user, + id: 'user-456', + relation_blocked: undefined, + }; + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([quoter_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([ + { ...mock_tweet, tweet_id: 'tweet-quote' }, + { ...mock_tweet, tweet_id: 'tweet-parent' }, + ]), + }); + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + messages_gateway.isOnline.mockReturnValue(true); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notifications_gateway.sendToUser).toHaveBeenCalled(); + }); + + it('should not send QUOTE notification when user is blocked', async () => { + const notification_data: QuoteNotificationEntity = { + type: NotificationType.QUOTE, + quoted_by: 'user-blocked', + quote_tweet_id: 'tweet-quote', + parent_tweet_id: 'tweet-parent', + created_at: new Date(), + _id: new Types.ObjectId(), + }; + + const payload = { + quoter: { id: 'user-blocked' }, + }; + + const blocked_user = { + ...mock_user, + id: 'user-blocked', + relation_blocked: true, + }; + + user_repository.createQueryBuilder = jest.fn().mockReturnValue({ + select: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([blocked_user]), + }); + + tweet_repository.createQueryBuilder = jest.fn().mockReturnValue({ + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([ + { ...mock_tweet, tweet_id: 'tweet-quote' }, + { ...mock_tweet, tweet_id: 'tweet-parent' }, + ]), + }); + + jest.spyOn(service as any, 'tryAggregateNotification').mockResolvedValue({ + aggregated: false, + }); + + messages_gateway.isOnline.mockReturnValue(true); + + await service.saveNotificationAndSend('user-123', notification_data, payload); + + expect(notifications_gateway.sendToUser).not.toHaveBeenCalled(); + expect(fcm_service.sendNotificationToUserDevice).not.toHaveBeenCalled(); + }); + }); + + describe('fetchNotificationWithData', () => { + it('should return null for null notification', async () => { + const result = await (service as any).fetchNotificationWithData('user-123', null); + expect(result).toBeNull(); + }); + + it('should fetch FOLLOW notification with user data', async () => { + const notification = { + _id: new Types.ObjectId(), + type: NotificationType.FOLLOW, + follower_id: ['user-456', 'user-789'], + created_at: new Date(), + }; + + user_repository.find.mockResolvedValue([ + { ...mock_user, id: 'user-456' }, + { ...mock_user, id: 'user-789' }, + ]); + tweet_repository.find.mockResolvedValue([]); + + const result = await (service as any).fetchNotificationWithData( + 'user-123', + notification + ); + + expect(result).toBeDefined(); + expect(result.type).toBe(NotificationType.FOLLOW); + }); + + it('should fetch LIKE notification with user and tweet data', async () => { + const notification = { + _id: new Types.ObjectId(), + type: NotificationType.LIKE, + liked_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }; + + user_repository.find.mockResolvedValue([{ ...mock_user, id: 'user-456' }]); + tweet_repository.find.mockResolvedValue([mock_tweet]); + + const result = await (service as any).fetchNotificationWithData( + 'user-123', + notification + ); + + expect(result).toBeDefined(); + expect(result.type).toBe(NotificationType.LIKE); + }); + + it('should fetch REPOST notification with user and tweet data', async () => { + const notification = { + _id: new Types.ObjectId(), + type: NotificationType.REPOST, + reposted_by: ['user-456'], + tweet_id: ['tweet-123'], + created_at: new Date(), + }; + + user_repository.find.mockResolvedValue([{ ...mock_user, id: 'user-456' }]); + tweet_repository.find.mockResolvedValue([mock_tweet]); + + const result = await (service as any).fetchNotificationWithData( + 'user-123', + notification + ); + + expect(result).toBeDefined(); + expect(result.type).toBe(NotificationType.REPOST); + }); + }); + + describe('deduplicateNotifications', () => { + it('should deduplicate LIKE notifications with same tweet', () => { + const notifications = [ + { + type: NotificationType.LIKE, + liked_by: [{ id: 'user-456' }], + tweets: [{ tweet_id: 'tweet-123' }], + created_at: new Date(), + }, + { + type: NotificationType.LIKE, + liked_by: [{ id: 'user-789' }], + tweets: [{ tweet_id: 'tweet-123' }], + created_at: new Date(), + }, + ]; + + const result = (service as any).deduplicateNotifications(notifications); + + expect(result.length).toBeLessThanOrEqual(2); + }); + + it('should deduplicate FOLLOW notifications', () => { + const notifications = [ + { + type: NotificationType.FOLLOW, + followers: [{ id: 'user-456' }], + created_at: new Date(), + }, + { + type: NotificationType.FOLLOW, + followers: [{ id: 'user-789' }], + created_at: new Date(), + }, + ]; + + const result = (service as any).deduplicateNotifications(notifications); + + expect(result.length).toBeLessThanOrEqual(2); + }); + + it('should not deduplicate REPLY notifications', () => { + const notifications = [ + { + type: NotificationType.REPLY, + replier: { id: 'user-456' }, + reply_tweet: { tweet_id: 'tweet-123' }, + created_at: new Date('2023-01-01'), + }, + { + type: NotificationType.REPLY, + replier: { id: 'user-456' }, + reply_tweet: { tweet_id: 'tweet-456' }, + created_at: new Date('2023-01-02'), + }, + ]; + + const result = (service as any).deduplicateNotifications(notifications); + + expect(result).toHaveLength(2); + }); + }); }); diff --git a/src/notifications/notifications.service.ts b/src/notifications/notifications.service.ts index af2515a5..56ec2915 100644 --- a/src/notifications/notifications.service.ts +++ b/src/notifications/notifications.service.ts @@ -1,6 +1,6 @@ import { forwardRef, Inject, Injectable, OnModuleInit } from '@nestjs/common'; import { InjectModel } from '@nestjs/mongoose'; -import { Model } from 'mongoose'; +import { Model, Types } from 'mongoose'; import { Notification } from './entities/notifications.entity'; import { BaseNotificationEntity } from './entities/base-notification.entity'; import { NotificationType } from './enums/notification-types'; @@ -8,6 +8,12 @@ import { NotificationsGateway } from './notifications.gateway'; import { InjectRepository } from '@nestjs/typeorm'; import { User } from 'src/user/entities'; import { Tweet } from 'src/tweets/entities'; +import { TweetLike } from 'src/tweets/entities/tweet-like.entity'; +import { TweetRepost } from 'src/tweets/entities/tweet-repost.entity'; +import { TweetBookmark } from 'src/tweets/entities/tweet-bookmark.entity'; +import { UserFollows } from 'src/user/entities/user-follows.entity'; +import { UserBlocks } from 'src/user/entities/user-blocks.entity'; +import { UserMutes } from 'src/user/entities/user-mutes.entity'; import { In, Repository } from 'typeorm'; import { ReplyNotificationEntity } from './entities/reply-notification.entity'; import { RepostNotificationEntity } from './entities/repost-notification.entity'; @@ -15,11 +21,14 @@ import { QuoteNotificationEntity } from './entities/quote-notification.entity'; import { LikeNotificationEntity } from './entities/like-notification.entity'; import { FollowNotificationEntity } from './entities/follow-notification.entity'; import { MentionNotificationEntity } from './entities/mention-notification.entity'; +import { MessageNotificationEntity } from './entities/message-notification.entity'; import { NotificationDto } from './dto/notifications-response.dto'; -import { BackgroundJobsModule } from 'src/background-jobs'; import { ClearJobService } from 'src/background-jobs/notifications/clear/clear.service'; -import { FCMService } from 'src/fcm/fcm.service'; +import { FCMService } from 'src/expo/expo.service'; import { MessagesGateway } from 'src/messages/messages.gateway'; +import { plainToInstance } from 'class-transformer'; +import { TweetResponseDTO } from 'src/tweets/dto/tweet-response.dto'; +import { UserResponseDTO } from 'src/tweets/dto/user-response.dto'; @Injectable() export class NotificationsService implements OnModuleInit { @@ -48,15 +57,13 @@ export class NotificationsService implements OnModuleInit { payload: any ): Promise { if (!notification_data.created_at) notification_data.created_at = new Date(); + if (!notification_data._id) notification_data._id = new Types.ObjectId(); - // Normalize notification data to ensure arrays this.normalizeNotificationData(notification_data); - // Check if we can aggregate this notification const aggregation_result = await this.tryAggregateNotification(user_id, notification_data); if (!aggregation_result.aggregated) { - // If not aggregated, add as new notification and increment newest_count await this.notificationModel.updateOne( { user: user_id }, { @@ -72,28 +79,204 @@ export class NotificationsService implements OnModuleInit { { upsert: true } ); + const enriched_payload = { ...payload }; + + let is_blocked = false; + + if ( + notification_data.type === NotificationType.REPLY || + notification_data.type === NotificationType.MENTION || + notification_data.type === NotificationType.QUOTE + ) { + const tweet_ids = new Set(); + const tweet_ids_needing_interactions = new Set(); + let actor_id: string | undefined; + + if (notification_data.type === NotificationType.REPLY) { + const n = notification_data as ReplyNotificationEntity; + if (n.reply_tweet_id) { + tweet_ids.add(n.reply_tweet_id); + tweet_ids_needing_interactions.add(n.reply_tweet_id); + } + if (n.original_tweet_id) tweet_ids.add(n.original_tweet_id); + actor_id = n.replied_by; + } else if (notification_data.type === NotificationType.MENTION) { + const n = notification_data as MentionNotificationEntity; + if (n.tweet_id) { + tweet_ids.add(n.tweet_id); + tweet_ids_needing_interactions.add(n.tweet_id); + } + if (n.parent_tweet_id) tweet_ids.add(n.parent_tweet_id); + actor_id = n.mentioned_by; + } else if (notification_data.type === NotificationType.QUOTE) { + const n = notification_data as QuoteNotificationEntity; + if (n.quote_tweet_id) { + tweet_ids.add(n.quote_tweet_id); + tweet_ids_needing_interactions.add(n.quote_tweet_id); + } + if (n.parent_tweet_id) tweet_ids.add(n.parent_tweet_id); + actor_id = n.quoted_by; + } + + const tweet_ids_array = Array.from(tweet_ids); + const ids_needing_interactions = tweet_ids_array.filter((id) => + tweet_ids_needing_interactions.has(id) + ); + const ids_not_needing_interactions = tweet_ids_array.filter( + (id) => !tweet_ids_needing_interactions.has(id) + ); + + const promises: Promise[] = []; + if (ids_needing_interactions.length > 0) { + promises.push( + this.getTweetsWithInteractions(ids_needing_interactions, user_id, true) + ); + } else { + promises.push(Promise.resolve([])); + } + + if (ids_not_needing_interactions.length > 0) { + promises.push( + this.getTweetsWithInteractions(ids_not_needing_interactions, user_id, false) + ); + } else { + promises.push(Promise.resolve([])); + } + + if (actor_id) { + promises.push(this.getUsersWithRelationships([actor_id], user_id, true)); + } else { + promises.push(Promise.resolve([])); + } + + const [tweets_with_interactions, tweets_without_interactions, users] = + await Promise.all(promises); + const tweets = [ + ...(tweets_with_interactions as Tweet[]), + ...(tweets_without_interactions as Tweet[]), + ]; + const tweet_map = new Map(tweets.map((t) => [t.tweet_id, t])); + const actor = (users as User[]).length > 0 ? (users as User[])[0] : undefined; + + if (actor) { + const enriched_user = this.enrichUserWithStatus(actor); + if (enriched_user.is_blocked) { + is_blocked = true; + } + if (notification_data.type === NotificationType.REPLY) { + enriched_payload.replier = enriched_user; + } else if (notification_data.type === NotificationType.MENTION) { + enriched_payload.mentioner = enriched_user; + } else if (notification_data.type === NotificationType.QUOTE) { + enriched_payload.quoter = enriched_user; + } + } + + if (tweet_ids.size > 0) { + if (notification_data.type === NotificationType.REPLY) { + const n = notification_data as ReplyNotificationEntity; + if (n.reply_tweet_id && tweet_map.has(n.reply_tweet_id)) { + enriched_payload.reply_tweet = this.enrichTweetWithStatus( + tweet_map.get(n.reply_tweet_id)! + ); + } + if (n.original_tweet_id && tweet_map.has(n.original_tweet_id)) { + enriched_payload.original_tweet = this.cleanTweet( + tweet_map.get(n.original_tweet_id)! + ); + } + } else if (notification_data.type === NotificationType.MENTION) { + const n = notification_data as MentionNotificationEntity; + if (n.tweet_id && tweet_map.has(n.tweet_id)) { + let t = tweet_map.get(n.tweet_id)!; + if ( + n.tweet_type === 'quote' && + n.parent_tweet_id && + tweet_map.has(n.parent_tweet_id) + ) { + t = { + ...t, + parent_tweet: this.cleanTweet( + tweet_map.get(n.parent_tweet_id)! + ), + } as any; + } + enriched_payload.tweet = this.enrichTweetWithStatus(t); + } + } else if (notification_data.type === NotificationType.QUOTE) { + const n = notification_data as QuoteNotificationEntity; + if (n.quote_tweet_id && tweet_map.has(n.quote_tweet_id)) { + let t = tweet_map.get(n.quote_tweet_id)!; + if (n.parent_tweet_id && tweet_map.has(n.parent_tweet_id)) { + t = { + ...t, + parent_tweet: this.cleanTweet( + tweet_map.get(n.parent_tweet_id)! + ), + } as any; + } + enriched_payload.quote_tweet = this.enrichTweetWithStatus(t); + } + } + } + } else if ( + notification_data.type === NotificationType.LIKE || + notification_data.type === NotificationType.REPOST + ) { + if (notification_data.type === NotificationType.LIKE) { + if (payload.tweet) { + enriched_payload.tweet = this.cleanTweet(payload.tweet); + } + if (payload.liker) { + enriched_payload.liker = this.cleanUser(payload.liker); + } + } else if (notification_data.type === NotificationType.REPOST) { + if (payload.tweet) { + enriched_payload.tweet = this.cleanTweet(payload.tweet); + } + if (payload.reposter) { + enriched_payload.reposter = this.cleanUser(payload.reposter); + } + } + } else if (notification_data.type === NotificationType.FOLLOW) { + enriched_payload.follower = { + id: payload.follower_id, + username: payload.follower_username, + name: payload.follower_name, + avatar_url: payload.follower_avatar_url, + }; + delete enriched_payload.follower_id; + delete enriched_payload.follower_username; + delete enriched_payload.follower_name; + delete enriched_payload.follower_avatar_url; + delete enriched_payload.followed_id; + } + const is_online = this.messagesGateway.isOnline(user_id); - if (is_online) { + if (is_online && !is_blocked) { + enriched_payload.created_at = new Date(); this.notificationsGateway.sendToUser(notification_data.type, user_id, { - ...payload, + ...enriched_payload, + id: notification_data._id.toString(), action: 'add', }); - } else { + } else if (!is_blocked) { await this.fcmService.sendNotificationToUserDevice( user_id, notification_data.type, - payload + { + ...payload, + id: notification_data._id.toString(), + } ); } } else { - // Increment newest_count for aggregated notification await this.notificationModel.updateOne( { user: user_id }, { $inc: { newest_count: 1 } } ); - // Fetch and populate the aggregated notification with full data const aggregated_notification_with_data = await this.fetchNotificationWithData( user_id, aggregation_result.updated_notification @@ -102,6 +285,7 @@ export class NotificationsService implements OnModuleInit { const is_online = this.messagesGateway.isOnline(user_id); if (is_online) { + aggregated_notification_with_data.created_at = new Date(); this.notificationsGateway.sendToUser(notification_data.type, user_id, { ...aggregated_notification_with_data, action: 'aggregate', @@ -112,7 +296,7 @@ export class NotificationsService implements OnModuleInit { user_id, notification_data.type, { - ...aggregated_notification_with_data, + ...payload, action: 'aggregate', } ); @@ -169,7 +353,6 @@ export class NotificationsService implements OnModuleInit { ? follow_notification.follower_id[0] : follow_notification.follower_id; - // Find the user document and check for existing FOLLOW notification const user_document = await this.notificationModel .findOne({ user: user_id }) .lean(); @@ -191,7 +374,6 @@ export class NotificationsService implements OnModuleInit { recent_follow_notification_index ] as any; - // Update the specific notification and return the updated document const updated_doc = await this.notificationModel.findOneAndUpdate( { user: user_id, @@ -220,13 +402,11 @@ export class NotificationsService implements OnModuleInit { return { aggregated: false }; } - // Find the updated notification const updated_notification = updated_doc.notifications?.find( (n: any) => n.type === NotificationType.FOLLOW && new Date(n.created_at) >= one_day_ago ); - // If we can't find the updated notification, treat as non-aggregated if (!updated_notification) { return { aggregated: false }; } @@ -234,6 +414,7 @@ export class NotificationsService implements OnModuleInit { return { aggregated: true, old_notification: { + id: old_notification._id ? old_notification._id.toString() : undefined, type: old_notification.type, created_at: old_notification.created_at, follower_id: old_notification.follower_id, @@ -251,7 +432,6 @@ export class NotificationsService implements OnModuleInit { ? like_notification.liked_by[0] : like_notification.liked_by; - // Find the user document and check for existing LIKE notification const user_document = await this.notificationModel .findOne({ user: user_id }) .lean(); @@ -260,40 +440,25 @@ export class NotificationsService implements OnModuleInit { return { aggregated: false }; } - // First, try to find aggregation by TWEET (multiple people liking the same tweet) const matching_by_tweet_index = user_document.notifications.findIndex((n: any) => { if (n.type !== NotificationType.LIKE) return false; if (new Date(n.created_at) < one_day_ago) return false; - // Check if this notification is for the same tweet AND only has one tweet (not aggregated by person) const tweet_id_array = Array.isArray(n.tweet_id) ? n.tweet_id : [n.tweet_id]; - const liked_by_array = Array.isArray(n.liked_by) ? n.liked_by : [n.liked_by]; - return ( - tweet_id_array.includes(new_tweet_id) && - tweet_id_array.length === 1 && - liked_by_array.length === 1 - ); + return tweet_id_array.includes(new_tweet_id) && tweet_id_array.length === 1; }); - // Second, try to find aggregation by PERSON (same person liking multiple tweets) const matching_by_person_index = user_document.notifications.findIndex((n: any) => { if (n.type !== NotificationType.LIKE) return false; if (new Date(n.created_at) < one_day_ago) return false; - // Check if this notification contains a like from the same person AND only has one person (not aggregated by tweet) const liked_by_array = Array.isArray(n.liked_by) ? n.liked_by : [n.liked_by]; - const tweet_id_array = Array.isArray(n.tweet_id) ? n.tweet_id : [n.tweet_id]; - return ( - liked_by_array.includes(new_liked_by) && - liked_by_array.length === 1 && - tweet_id_array.length === 1 - ); + return liked_by_array.includes(new_liked_by) && liked_by_array.length === 1; }); let aggregation_type: 'tweet' | 'person' | null = null; let matching_index = -1; - // Prioritize aggregation by tweet if found if (matching_by_tweet_index !== -1) { aggregation_type = 'tweet'; matching_index = matching_by_tweet_index; @@ -306,10 +471,8 @@ export class NotificationsService implements OnModuleInit { const old_notification = user_document.notifications[matching_index] as any; - // Update based on aggregation type and return the updated document let updated_doc_like; if (aggregation_type === 'tweet') { - // Add the new person to the existing notification for this tweet updated_doc_like = await this.notificationModel.findOneAndUpdate( { user: user_id }, { @@ -325,6 +488,8 @@ export class NotificationsService implements OnModuleInit { { 'elem.type': NotificationType.LIKE, 'elem.tweet_id': new_tweet_id, + 'elem.tweet_id.0': { $exists: true }, + 'elem.tweet_id.1': { $exists: false }, 'elem.created_at': { $gte: one_day_ago }, }, ], @@ -333,7 +498,6 @@ export class NotificationsService implements OnModuleInit { } ); } else { - // Add the new tweet to the existing notification for this person updated_doc_like = await this.notificationModel.findOneAndUpdate( { user: user_id }, { @@ -349,6 +513,8 @@ export class NotificationsService implements OnModuleInit { { 'elem.type': NotificationType.LIKE, 'elem.liked_by': new_liked_by, + 'elem.liked_by.0': { $exists: true }, + 'elem.liked_by.1': { $exists: false }, 'elem.created_at': { $gte: one_day_ago }, }, ], @@ -362,7 +528,6 @@ export class NotificationsService implements OnModuleInit { return { aggregated: false }; } - // Find the updated notification const updated_notification_like = updated_doc_like.notifications?.find((n: any) => { if (n.type !== NotificationType.LIKE) return false; if (new Date(n.created_at) < one_day_ago) return false; @@ -374,7 +539,6 @@ export class NotificationsService implements OnModuleInit { ); }); - // If we can't find the updated notification, treat as non-aggregated if (!updated_notification_like) { return { aggregated: false }; } @@ -382,6 +546,7 @@ export class NotificationsService implements OnModuleInit { return { aggregated: true, old_notification: { + id: old_notification._id ? old_notification._id.toString() : undefined, type: old_notification.type, created_at: old_notification.created_at, tweet_id: old_notification.tweet_id, @@ -400,7 +565,6 @@ export class NotificationsService implements OnModuleInit { ? repost_notification.reposted_by[0] : repost_notification.reposted_by; - // Find the user document and check for existing REPOST notification const user_document = await this.notificationModel .findOne({ user: user_id }) .lean(); @@ -409,44 +573,31 @@ export class NotificationsService implements OnModuleInit { return { aggregated: false }; } - // First, try to find aggregation by TWEET (multiple people reposting the same tweet) const matching_by_tweet_index = user_document.notifications.findIndex((n: any) => { if (n.type !== NotificationType.REPOST) return false; if (new Date(n.created_at) < one_day_ago) return false; - // Check if this notification is for the same tweet AND only has one tweet (not aggregated by person) const tweet_id_array = Array.isArray(n.tweet_id) ? n.tweet_id : [n.tweet_id]; - const reposted_by_array = Array.isArray(n.reposted_by) - ? n.reposted_by - : [n.reposted_by]; - return ( - tweet_id_array.includes(new_tweet_id) && - tweet_id_array.length === 1 && - reposted_by_array.length === 1 - ); + + return tweet_id_array.includes(new_tweet_id) && tweet_id_array.length === 1; }); - // Second, try to find aggregation by PERSON (same person reposting multiple tweets) const matching_by_person_index = user_document.notifications.findIndex((n: any) => { if (n.type !== NotificationType.REPOST) return false; if (new Date(n.created_at) < one_day_ago) return false; - // Check if this notification contains a repost from the same person AND only has one person (not aggregated by tweet) const reposted_by_array = Array.isArray(n.reposted_by) ? n.reposted_by : [n.reposted_by]; - const tweet_id_array = Array.isArray(n.tweet_id) ? n.tweet_id : [n.tweet_id]; return ( reposted_by_array.includes(new_reposted_by) && - reposted_by_array.length === 1 && - tweet_id_array.length === 1 + reposted_by_array.length === 1 ); }); let aggregation_type: 'tweet' | 'person' | null = null; let matching_index = -1; - // Prioritize aggregation by tweet if found if (matching_by_tweet_index !== -1) { aggregation_type = 'tweet'; matching_index = matching_by_tweet_index; @@ -459,10 +610,8 @@ export class NotificationsService implements OnModuleInit { const old_notification = user_document.notifications[matching_index] as any; - // Update based on aggregation type and return the updated document let updated_doc_repost; if (aggregation_type === 'tweet') { - // Add the new person to the existing notification for this tweet updated_doc_repost = await this.notificationModel.findOneAndUpdate( { user: user_id }, { @@ -478,6 +627,8 @@ export class NotificationsService implements OnModuleInit { { 'elem.type': NotificationType.REPOST, 'elem.tweet_id': new_tweet_id, + 'elem.tweet_id.0': { $exists: true }, + 'elem.tweet_id.1': { $exists: false }, 'elem.created_at': { $gte: one_day_ago }, }, ], @@ -486,7 +637,6 @@ export class NotificationsService implements OnModuleInit { } ); } else { - // Add the new tweet to the existing notification for this person updated_doc_repost = await this.notificationModel.findOneAndUpdate( { user: user_id }, { @@ -502,6 +652,8 @@ export class NotificationsService implements OnModuleInit { { 'elem.type': NotificationType.REPOST, 'elem.reposted_by': new_reposted_by, + 'elem.reposted_by.0': { $exists: true }, + 'elem.reposted_by.1': { $exists: false }, 'elem.created_at': { $gte: one_day_ago }, }, ], @@ -515,7 +667,6 @@ export class NotificationsService implements OnModuleInit { return { aggregated: false }; } - // Find the updated notification const updated_notification_repost = updated_doc_repost.notifications?.find( (n: any) => { if (n.type !== NotificationType.REPOST) return false; @@ -532,7 +683,6 @@ export class NotificationsService implements OnModuleInit { } ); - // If we can't find the updated notification, treat as non-aggregated if (!updated_notification_repost) { return { aggregated: false }; } @@ -540,6 +690,7 @@ export class NotificationsService implements OnModuleInit { return { aggregated: true, old_notification: { + id: old_notification._id ? old_notification._id.toString() : undefined, type: old_notification.type, created_at: old_notification.created_at, tweet_id: old_notification.tweet_id, @@ -550,7 +701,6 @@ export class NotificationsService implements OnModuleInit { } default: - // Quote and Reply notifications are not aggregated return { aggregated: false }; } } @@ -563,6 +713,133 @@ export class NotificationsService implements OnModuleInit { this.notificationsGateway.sendToUser(notification_type, user_id, payload); } + private async getTweetsWithInteractions( + tweet_ids: string[], + user_id: string, + flag: boolean = false + ): Promise { + if (tweet_ids.length === 0) return []; + + let query = this.tweet_repository.createQueryBuilder('tweet'); + + if (flag) { + query = query + .leftJoinAndMapOne( + 'tweet.current_user_like', + TweetLike, + 'like', + 'like.tweet_id = tweet.tweet_id AND like.user_id = :user_id', + { user_id } + ) + .leftJoinAndMapOne( + 'tweet.current_user_repost', + TweetRepost, + 'repost', + 'repost.tweet_id = tweet.tweet_id AND repost.user_id = :user_id', + { user_id } + ) + .leftJoinAndMapOne( + 'tweet.current_user_bookmark', + TweetBookmark, + 'bookmark', + 'bookmark.tweet_id = tweet.tweet_id AND bookmark.user_id = :user_id', + { user_id } + ); + } + query = query.where('tweet.tweet_id IN (:...tweet_ids)', { tweet_ids }); + return query.getMany(); + } + + private async getUsersWithRelationships( + user_ids: string[], + current_user_id: string, + flag: boolean = false + ): Promise { + if (user_ids.length === 0) return []; + + const columns = this.user_repository.metadata.columns + .map((col) => `user.${col.propertyName}`) + .filter((name) => !name.includes('password') && !name.includes('fcm_token')); + + let query = this.user_repository.createQueryBuilder('user').select(columns); + + if (flag) { + query = query + .leftJoinAndMapOne( + 'user.relation_following', + UserFollows, + 'following', + 'following.follower_id = :current_user_id AND following.followed_id = user.id', + { current_user_id } + ) + .leftJoinAndMapOne( + 'user.relation_follower', + UserFollows, + 'follower', + 'follower.followed_id = :current_user_id AND follower.follower_id = user.id', + { current_user_id } + ) + .leftJoinAndMapOne( + 'user.relation_blocked', + UserBlocks, + 'blocked', + 'blocked.blocker_id = :current_user_id AND blocked.blocked_id = user.id', + { current_user_id } + ) + .leftJoinAndMapOne( + 'user.relation_muted', + UserMutes, + 'muted', + 'muted.muter_id = :current_user_id AND muted.muted_id = user.id', + { current_user_id } + ); + } + query = query.where('user.id IN (:...user_ids)', { user_ids }); + return query.getMany(); + } + + private enrichUserWithStatus(user: User): any { + const user_dto = plainToInstance(UserResponseDTO, user, { + excludeExtraneousValues: true, + }) as any; + user_dto.is_following = !!(user as any).relation_following; + user_dto.is_follower = !!(user as any).relation_follower; + user_dto.is_blocked = !!(user as any).relation_blocked; + user_dto.is_muted = !!(user as any).relation_muted; + return user_dto; + } + + private cleanUser(user: User): any { + const user_dto = plainToInstance(UserResponseDTO, user, { + excludeExtraneousValues: true, + }) as any; + delete user_dto.is_following; + delete user_dto.is_follower; + delete user_dto.is_blocked; + delete user_dto.is_muted; + return user_dto; + } + + private enrichTweetWithStatus(tweet: Tweet): any { + const tweet_dto = plainToInstance(TweetResponseDTO, tweet, { + excludeExtraneousValues: true, + }) as any; + tweet_dto.is_liked = !!(tweet as any).current_user_like; + tweet_dto.is_reposted = !!(tweet as any).current_user_repost; + tweet_dto.is_bookmarked = !!(tweet as any).current_user_bookmark; + return tweet_dto; + } + + private cleanTweet(tweet: Tweet): any { + const tweet_dto = plainToInstance(TweetResponseDTO, tweet, { + excludeExtraneousValues: true, + }) as any; + delete tweet_dto.is_liked; + delete tweet_dto.is_reposted; + delete tweet_dto.is_bookmarked; + return tweet_dto; + } + async getUserNotifications( user_id: string, page: number = 1 @@ -598,7 +875,13 @@ export class NotificationsService implements OnModuleInit { } const user_ids = new Set(); + const user_ids_needing_relationships = new Set(); const tweet_ids = new Set(); + const tweet_ids_needing_interactions = new Set(); + + user_notifications.notifications.sort( + (a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime() + ); user_notifications.notifications.forEach((notification: any) => { switch (notification.type) { @@ -623,7 +906,6 @@ export class NotificationsService implements OnModuleInit { } } if (like_notification.tweet_id) { - // Collect ALL tweet IDs for aggregated notifications if (Array.isArray(like_notification.tweet_id)) { like_notification.tweet_id.forEach((id) => tweet_ids.add(id)); } else { @@ -636,9 +918,11 @@ export class NotificationsService implements OnModuleInit { const quote_notification = notification as QuoteNotificationEntity; if (quote_notification.quoted_by) { user_ids.add(quote_notification.quoted_by); + user_ids_needing_relationships.add(quote_notification.quoted_by); } if (quote_notification.quote_tweet_id) { tweet_ids.add(quote_notification.quote_tweet_id); + tweet_ids_needing_interactions.add(quote_notification.quote_tweet_id); } if (quote_notification.parent_tweet_id) { tweet_ids.add(quote_notification.parent_tweet_id); @@ -649,9 +933,12 @@ export class NotificationsService implements OnModuleInit { const reply_notification = notification as ReplyNotificationEntity; if (reply_notification.replied_by) { user_ids.add(reply_notification.replied_by); + user_ids_needing_relationships.add(reply_notification.replied_by); + user_ids_needing_relationships.add(reply_notification.replied_by); } if (reply_notification.reply_tweet_id) { tweet_ids.add(reply_notification.reply_tweet_id); + tweet_ids_needing_interactions.add(reply_notification.reply_tweet_id); } if (reply_notification.original_tweet_id) { tweet_ids.add(reply_notification.original_tweet_id); @@ -668,7 +955,6 @@ export class NotificationsService implements OnModuleInit { } } if (repost_notification.tweet_id) { - // Collect ALL tweet IDs for aggregated notifications if (Array.isArray(repost_notification.tweet_id)) { repost_notification.tweet_id.forEach((id) => tweet_ids.add(id)); } else { @@ -681,33 +967,66 @@ export class NotificationsService implements OnModuleInit { const mention_notification = notification as MentionNotificationEntity; if (mention_notification.mentioned_by) { user_ids.add(mention_notification.mentioned_by); + user_ids_needing_relationships.add(mention_notification.mentioned_by); } if (mention_notification.tweet_id) { tweet_ids.add(mention_notification.tweet_id); + tweet_ids_needing_interactions.add(mention_notification.tweet_id); } if (mention_notification.parent_tweet_id) { tweet_ids.add(mention_notification.parent_tweet_id); } break; } + case NotificationType.MESSAGE: { + const message_notification = notification as MessageNotificationEntity; + if (message_notification.sent_by) { + user_ids.add(message_notification.sent_by); + } + break; + } } }); - // Fetch all data in parallel - const [users, tweets] = await Promise.all([ - user_ids.size > 0 - ? this.user_repository.find({ - where: { id: In(Array.from(user_ids)) }, - select: ['id', 'username', 'name', 'avatar_url', 'email'], - }) + const tweet_ids_array = Array.from(tweet_ids); + const ids_needing_interactions = tweet_ids_array.filter((id) => + tweet_ids_needing_interactions.has(id) + ); + const ids_not_needing_interactions = tweet_ids_array.filter( + (id) => !tweet_ids_needing_interactions.has(id) + ); + + const user_ids_array = Array.from(user_ids); + const user_ids_needing_rel_array = user_ids_array.filter((id) => + user_ids_needing_relationships.has(id) + ); + const user_ids_not_needing_rel_array = user_ids_array.filter( + (id) => !user_ids_needing_relationships.has(id) + ); + + const [ + users_with_rel, + users_without_rel, + tweets_with_interactions, + tweets_without_interactions, + ] = await Promise.all([ + user_ids_needing_rel_array.length > 0 + ? this.getUsersWithRelationships(user_ids_needing_rel_array, user_id, true) : [], - tweet_ids.size > 0 - ? this.tweet_repository.find({ - where: { tweet_id: In(Array.from(tweet_ids)) }, - }) + user_ids_not_needing_rel_array.length > 0 + ? this.getUsersWithRelationships(user_ids_not_needing_rel_array, user_id, false) + : [], + ids_needing_interactions.length > 0 + ? this.getTweetsWithInteractions(ids_needing_interactions, user_id, true) + : [], + ids_not_needing_interactions.length > 0 + ? this.getTweetsWithInteractions(ids_not_needing_interactions, user_id, false) : [], ]); + const users = [...users_with_rel, ...users_without_rel]; + const tweets = [...tweets_with_interactions, ...tweets_without_interactions]; + const user_map = new Map( users.map((user) => [user.id, user] as [string, User]) ); @@ -720,6 +1039,8 @@ export class NotificationsService implements OnModuleInit { const response_notifications: NotificationDto[] = user_notifications.notifications .map((notification: any) => { + if (!notification._id) return null; + const notification_id = notification._id.toString(); switch (notification.type) { case NotificationType.FOLLOW: { const follow_notification = notification as FollowNotificationEntity; @@ -737,14 +1058,15 @@ export class NotificationsService implements OnModuleInit { if (!user) { missing_user_ids.add(id); } - return user; + return user ? this.cleanUser(user) : undefined; }) - .filter((user): user is User => user !== undefined); + .filter((user) => user !== undefined); if (followers.length === 0) { return null; } return { + id: notification_id, type: notification.type, created_at: notification.created_at, followers, @@ -753,7 +1075,6 @@ export class NotificationsService implements OnModuleInit { case NotificationType.LIKE: { const like_notification = notification as LikeNotificationEntity; - // Skip notifications with missing tweet_id if ( !like_notification.tweet_id || like_notification.tweet_id.length === 0 @@ -761,15 +1082,14 @@ export class NotificationsService implements OnModuleInit { return null; } - // Get ALL tweet IDs as an array const tweet_ids_array = Array.isArray(like_notification.tweet_id) ? like_notification.tweet_id : [like_notification.tweet_id as any]; - // Map all tweet IDs to tweet objects const tweets = tweet_ids_array .map((id) => tweet_map.get(id)) - .filter((tweet): tweet is Tweet => tweet !== undefined); + .filter((tweet) => tweet !== undefined) + .map((tweet) => this.cleanTweet(tweet)); if (tweets.length === 0) { tweet_ids_array.forEach((id) => missing_tweet_ids.add(id)); @@ -793,15 +1113,16 @@ export class NotificationsService implements OnModuleInit { if (!user) { missing_user_ids.add(id); } - return user; + return user ? this.cleanUser(user) : undefined; }) - .filter((user): user is User => user !== undefined); + .filter((user) => user !== undefined); if (likers.length === 0) { return null; } return { + id: notification_id, type: notification.type, created_at: notification.created_at, likers, @@ -825,17 +1146,18 @@ export class NotificationsService implements OnModuleInit { } return null; } - // Nest parent_tweet inside quote_tweet + const quote_tweet_with_parent = { - ...quote_tweet, - parent_tweet, + ...this.enrichTweetWithStatus(quote_tweet), + parent_tweet: this.cleanTweet(parent_tweet), }; return { + id: notification_id, type: notification.type, created_at: notification.created_at, - quoter, + quoter: this.enrichUserWithStatus(quoter), quote_tweet: quote_tweet_with_parent, - } as NotificationDto; + } as unknown as NotificationDto; } case NotificationType.REPLY: { const reply_notification = notification as ReplyNotificationEntity; @@ -845,7 +1167,6 @@ export class NotificationsService implements OnModuleInit { : null; const original_tweet = tweet_map.get(reply_notification.original_tweet_id); - // We need replier and original_tweet, reply_tweet is optional if (!replier || !original_tweet) { if (!replier && reply_notification.replied_by) { missing_user_ids.add(reply_notification.replied_by); @@ -859,18 +1180,20 @@ export class NotificationsService implements OnModuleInit { return null; } return { + id: notification_id, type: notification.type, created_at: notification.created_at, - replier, - reply_tweet, - original_tweet, + replier: this.enrichUserWithStatus(replier), + reply_tweet: reply_tweet + ? this.enrichTweetWithStatus(reply_tweet) + : null, + original_tweet: this.cleanTweet(original_tweet), conversation_id: reply_notification.conversation_id, - } as NotificationDto; + } as unknown as NotificationDto; } case NotificationType.REPOST: { const repost_notification = notification as RepostNotificationEntity; - // Skip notifications with missing tweet_id if ( !repost_notification.tweet_id || repost_notification.tweet_id.length === 0 @@ -878,15 +1201,14 @@ export class NotificationsService implements OnModuleInit { return null; } - // Get ALL tweet IDs as an array const tweet_ids_array = Array.isArray(repost_notification.tweet_id) ? repost_notification.tweet_id : [repost_notification.tweet_id as any]; - // Map all tweet IDs to tweet objects const tweets = tweet_ids_array .map((id) => tweet_map.get(id)) - .filter((tweet): tweet is Tweet => tweet !== undefined); + .filter((tweet) => tweet !== undefined) + .map((tweet) => this.cleanTweet(tweet)); if (tweets.length === 0) { tweet_ids_array.forEach((id) => missing_tweet_ids.add(id)); @@ -910,15 +1232,16 @@ export class NotificationsService implements OnModuleInit { if (!user) { missing_user_ids.add(id); } - return user; + return user ? this.cleanUser(user) : undefined; }) - .filter((user): user is User => user !== undefined); + .filter((user) => user !== undefined); if (reposters.length === 0) { return null; } return { + id: notification_id, type: notification.type, created_at: notification.created_at, reposters, @@ -940,7 +1263,6 @@ export class NotificationsService implements OnModuleInit { return null; } - // For quote tweets, include parent_tweet if available let mention_tweet = tweet; if ( mention_notification.tweet_type === 'quote' && @@ -952,7 +1274,7 @@ export class NotificationsService implements OnModuleInit { if (parent_tweet) { mention_tweet = { ...tweet, - parent_tweet, + parent_tweet: this.cleanTweet(parent_tweet), } as any; } else { missing_tweet_ids.add(mention_notification.parent_tweet_id); @@ -960,20 +1282,38 @@ export class NotificationsService implements OnModuleInit { } return { + id: notification_id, type: notification.type, created_at: notification.created_at, - mentioner, - tweet: mention_tweet, + mentioner: this.enrichUserWithStatus(mentioner), + tweet: this.enrichTweetWithStatus(mention_tweet), tweet_type: mention_notification.tweet_type, }; } + case NotificationType.MESSAGE: { + const message_notification = notification as MessageNotificationEntity; + const sender = user_map.get(message_notification.sent_by); + + if (!sender) { + missing_user_ids.add(message_notification.sent_by); + return null; + } + + return { + id: notification_id, + type: notification.type, + created_at: notification.created_at, + sender: this.cleanUser(sender), + message_id: message_notification.message_id, + chat_id: message_notification.chat_id, + } as unknown as NotificationDto; + } default: return null; } }) - .filter((notification): notification is NotificationDto => notification !== null); + .filter((notification) => notification !== null); - // Deduplicate notifications: merge those with same type, same people, and same tweet const deduplicated_notifications = this.deduplicateNotifications(response_notifications); // Clean notifications with missing tweets @@ -998,8 +1338,19 @@ export class NotificationsService implements OnModuleInit { const skip = (page - 1) * page_size; const paginated_notifications = deduplicated_notifications.slice(skip, skip + page_size); + const filtered_paginated_notifications = paginated_notifications.filter((notification) => { + if (notification.type === NotificationType.REPLY) { + return !(notification as any).replier?.is_blocked; + } else if (notification.type === NotificationType.MENTION) { + return !(notification as any).mentioner?.is_blocked; + } else if (notification.type === NotificationType.QUOTE) { + return !(notification as any).quoter?.is_blocked; + } + return true; + }); + return { - notifications: paginated_notifications, + notifications: filtered_paginated_notifications, page, page_size, total, @@ -1023,7 +1374,6 @@ export class NotificationsService implements OnModuleInit { }> { const page_size = 10; - // Get all notifications from MongoDB const user_notifications = await this.notificationModel .findOne({ user: user_id }) .lean() @@ -1045,7 +1395,6 @@ export class NotificationsService implements OnModuleInit { }; } - // Filter to only include mentions and replies from raw MongoDB data const filtered_notifications = user_notifications.notifications.filter( (notification: any) => notification.type === NotificationType.MENTION || @@ -1064,18 +1413,21 @@ export class NotificationsService implements OnModuleInit { }; } - // Collect user IDs and tweet IDs from filtered notifications const user_ids = new Set(); + const user_ids_needing_relationships = new Set(); const tweet_ids = new Set(); + const tweet_ids_needing_interactions = new Set(); filtered_notifications.forEach((notification: any) => { if (notification.type === NotificationType.MENTION) { const mention_notification = notification as MentionNotificationEntity; if (mention_notification.mentioned_by) { user_ids.add(mention_notification.mentioned_by); + user_ids_needing_relationships.add(mention_notification.mentioned_by); } if (mention_notification.tweet_id) { tweet_ids.add(mention_notification.tweet_id); + tweet_ids_needing_interactions.add(mention_notification.tweet_id); } if (mention_notification.parent_tweet_id) { tweet_ids.add(mention_notification.parent_tweet_id); @@ -1084,9 +1436,11 @@ export class NotificationsService implements OnModuleInit { const reply_notification = notification as ReplyNotificationEntity; if (reply_notification.replied_by) { user_ids.add(reply_notification.replied_by); + user_ids_needing_relationships.add(reply_notification.replied_by); } if (reply_notification.reply_tweet_id) { tweet_ids.add(reply_notification.reply_tweet_id); + tweet_ids_needing_interactions.add(reply_notification.reply_tweet_id); } if (reply_notification.original_tweet_id) { tweet_ids.add(reply_notification.original_tweet_id); @@ -1094,21 +1448,45 @@ export class NotificationsService implements OnModuleInit { } }); - // Fetch all required data in parallel - const [users, tweets] = await Promise.all([ - user_ids.size > 0 - ? this.user_repository.find({ - where: { id: In(Array.from(user_ids)) }, - select: ['id', 'username', 'name', 'avatar_url', 'email'], - }) + const tweet_ids_array = Array.from(tweet_ids); + const ids_needing_interactions = tweet_ids_array.filter((id) => + tweet_ids_needing_interactions.has(id) + ); + const ids_not_needing_interactions = tweet_ids_array.filter( + (id) => !tweet_ids_needing_interactions.has(id) + ); + + const user_ids_array = Array.from(user_ids); + const user_ids_needing_rel_array = user_ids_array.filter((id) => + user_ids_needing_relationships.has(id) + ); + const user_ids_not_needing_rel_array = user_ids_array.filter( + (id) => !user_ids_needing_relationships.has(id) + ); + + const [ + users_with_rel, + users_without_rel, + tweets_with_interactions, + tweets_without_interactions, + ] = await Promise.all([ + user_ids_needing_rel_array.length > 0 + ? this.getUsersWithRelationships(user_ids_needing_rel_array, user_id, true) : [], - tweet_ids.size > 0 - ? this.tweet_repository.find({ - where: { tweet_id: In(Array.from(tweet_ids)) }, - }) + user_ids_not_needing_rel_array.length > 0 + ? this.getUsersWithRelationships(user_ids_not_needing_rel_array, user_id, false) + : [], + ids_needing_interactions.length > 0 + ? this.getTweetsWithInteractions(ids_needing_interactions, user_id, true) + : [], + ids_not_needing_interactions.length > 0 + ? this.getTweetsWithInteractions(ids_not_needing_interactions, user_id, false) : [], ]); + const users = [...users_with_rel, ...users_without_rel]; + const tweets = [...tweets_with_interactions, ...tweets_without_interactions]; + const user_map = new Map( users.map((user) => [user.id, user] as [string, User]) ); @@ -1118,9 +1496,9 @@ export class NotificationsService implements OnModuleInit { const missing_tweet_ids = new Set(); - // Process filtered notifications const response_notifications: NotificationDto[] = filtered_notifications .map((notification: any) => { + if (!notification._id) return null; if (notification.type === NotificationType.MENTION) { const mention_notification = notification as MentionNotificationEntity; const mentioner = user_map.get(mention_notification.mentioned_by); @@ -1133,7 +1511,6 @@ export class NotificationsService implements OnModuleInit { return null; } - // For quote tweets, include parent_tweet if available let mention_tweet = tweet; if ( mention_notification.tweet_type === 'quote' && @@ -1143,7 +1520,7 @@ export class NotificationsService implements OnModuleInit { if (parent_tweet) { mention_tweet = { ...tweet, - parent_tweet, + parent_tweet: this.cleanTweet(parent_tweet), } as any; } else { missing_tweet_ids.add(mention_notification.parent_tweet_id); @@ -1151,10 +1528,11 @@ export class NotificationsService implements OnModuleInit { } return { + id: notification._id ? notification._id.toString() : 'unknown', type: notification.type, created_at: notification.created_at, - mentioner, - tweet: mention_tweet, + mentioner: this.enrichUserWithStatus(mentioner), + tweet: this.enrichTweetWithStatus(mention_tweet), tweet_type: mention_notification.tweet_type, }; } else if (notification.type === NotificationType.REPLY) { @@ -1176,19 +1554,19 @@ export class NotificationsService implements OnModuleInit { } return { + id: notification._id ? notification._id.toString() : 'unknown', type: notification.type, created_at: notification.created_at, - replier, - reply_tweet, - original_tweet, + replier: this.enrichUserWithStatus(replier), + reply_tweet: reply_tweet ? this.enrichTweetWithStatus(reply_tweet) : null, + original_tweet: this.cleanTweet(original_tweet), conversation_id: reply_notification.conversation_id, - } as NotificationDto; + } as unknown as NotificationDto; } return null; }) - .filter((notification): notification is NotificationDto => notification !== null); + .filter((notification) => notification !== null); - // Clean up notifications with missing tweets if (missing_tweet_ids.size > 0) { await this.clear_jobs_service.queueClearNotification({ user_id, @@ -1202,8 +1580,17 @@ export class NotificationsService implements OnModuleInit { const skip = (page - 1) * page_size; const paginated_notifications = response_notifications.slice(skip, skip + page_size); + const filtered_paginated_notifications = paginated_notifications.filter((notification) => { + if (notification.type === NotificationType.REPLY) { + return !(notification as any).replier?.is_blocked; + } else if (notification.type === NotificationType.MENTION) { + return !(notification as any).mentioner?.is_blocked; + } + return true; + }); + return { - notifications: paginated_notifications, + notifications: filtered_paginated_notifications, page, page_size, total, @@ -1222,7 +1609,6 @@ export class NotificationsService implements OnModuleInit { switch (notification.type) { case NotificationType.LIKE: { const like_notification = notification as any; - // Create key based on type + sorted user IDs + sorted tweet IDs const user_ids = like_notification.likers ?.map((u: any) => u.id) @@ -1236,7 +1622,6 @@ export class NotificationsService implements OnModuleInit { key = `like:${user_ids}:${tweet_ids}`; if (map.has(key)) { - // Keep the one with the most recent created_at const existing = map.get(key)!; if (new Date(notification.created_at) > new Date(existing.created_at)) { map.set(key, notification); @@ -1248,7 +1633,6 @@ export class NotificationsService implements OnModuleInit { } case NotificationType.REPOST: { const repost_notification = notification as any; - // Create key based on type + sorted user IDs + sorted tweet IDs const user_ids = repost_notification.reposters ?.map((u: any) => u.id) @@ -1262,7 +1646,6 @@ export class NotificationsService implements OnModuleInit { key = `repost:${user_ids}:${tweet_ids}`; if (map.has(key)) { - // Keep the one with the most recent created_at const existing = map.get(key)!; if (new Date(notification.created_at) > new Date(existing.created_at)) { map.set(key, notification); @@ -1274,7 +1657,6 @@ export class NotificationsService implements OnModuleInit { } case NotificationType.FOLLOW: { const follow_notification = notification as any; - // Create key based on type + sorted user IDs const user_ids = follow_notification.followers ?.map((u: any) => u.id) @@ -1283,7 +1665,6 @@ export class NotificationsService implements OnModuleInit { key = `follow:${user_ids}`; if (map.has(key)) { - // Keep the one with the most recent created_at const existing = map.get(key)!; if (new Date(notification.created_at) > new Date(existing.created_at)) { map.set(key, notification); @@ -1294,7 +1675,6 @@ export class NotificationsService implements OnModuleInit { break; } default: - // For REPLY and QUOTE, use unique key (no deduplication) key = `${notification.type}:${notification.created_at.toString()}:${Math.random()}`; map.set(key, notification); break; @@ -1306,7 +1686,6 @@ export class NotificationsService implements OnModuleInit { async deleteNotificationsByTweetIds(user_id: string, tweet_ids: string[]): Promise { try { - // Delete notifications where any tweet-related field matches the provided tweet IDs for (const tweet_id of tweet_ids) { await this.notificationModel.updateOne( { user: user_id }, @@ -1336,9 +1715,7 @@ export class NotificationsService implements OnModuleInit { missing_user_ids: string[] ): Promise { try { - // Remove user IDs from arrays in aggregated notifications (FOLLOW, LIKE, REPOST) for (const missing_user_id of missing_user_ids) { - // Remove from follower_id arrays in FOLLOW notifications await this.notificationModel.updateOne( { user: user_id }, { @@ -1348,7 +1725,6 @@ export class NotificationsService implements OnModuleInit { } ); - // Remove from liked_by arrays in LIKE notifications await this.notificationModel.updateOne( { user: user_id }, { @@ -1358,7 +1734,6 @@ export class NotificationsService implements OnModuleInit { } ); - // Remove from reposted_by arrays in REPOST notifications await this.notificationModel.updateOne( { user: user_id }, { @@ -1368,7 +1743,6 @@ export class NotificationsService implements OnModuleInit { } ); - // Remove entire notifications where the user is the primary actor (QUOTE, REPLY, MENTION) await this.notificationModel.updateOne( { user: user_id }, { @@ -1385,7 +1759,6 @@ export class NotificationsService implements OnModuleInit { ); } - // Clean up notifications with empty arrays (FOLLOW, LIKE, REPOST) await this.notificationModel.updateOne( { user: user_id }, { @@ -1406,14 +1779,29 @@ export class NotificationsService implements OnModuleInit { } } - async removeFollowNotification(user_id: string, follower_id: string): Promise { + async removeFollowNotification(user_id: string, follower_id: string): Promise { try { - // Calculate the date 1 day ago const one_day_ago = new Date(); one_day_ago.setDate(one_day_ago.getDate() - 1); const now = new Date(); - // First, try to remove the follower from an aggregated notification + const user_document = await this.notificationModel.findOne({ user: user_id }).lean(); + if (!user_document || !user_document.notifications) return null; + + const notification_index = user_document.notifications.findIndex( + (n: any) => + n.type === NotificationType.FOLLOW && + new Date(n.created_at) >= one_day_ago && + (Array.isArray(n.follower_id) + ? n.follower_id.includes(follower_id) + : n.follower_id === follower_id) + ); + + if (notification_index === -1) return null; + + const notification = user_document.notifications[notification_index] as any; + const notification_id = notification._id ? notification._id.toString() : null; + const result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1434,7 +1822,6 @@ export class NotificationsService implements OnModuleInit { } ); - // Then, remove any follow notifications with empty follower_id arrays const cleanup_result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1448,8 +1835,9 @@ export class NotificationsService implements OnModuleInit { } ); - // Return true if any modification was made - return result.modifiedCount > 0 || cleanup_result.modifiedCount > 0; + return result.modifiedCount > 0 || cleanup_result.modifiedCount > 0 + ? notification_id + : null; } catch (error) { console.error('Error removing follow notification:', error); throw error; @@ -1460,21 +1848,18 @@ export class NotificationsService implements OnModuleInit { user_id: string, tweet_id: string, liked_by: string - ): Promise { + ): Promise { try { - // Calculate the date 1 day ago const one_day_ago = new Date(); one_day_ago.setDate(one_day_ago.getDate() - 1); const now = new Date(); - // First, check for aggregated notifications const user_document = await this.notificationModel.findOne({ user: user_id }).lean(); if (!user_document || !user_document.notifications) { - return false; + return null; } - // Find the notification that contains the like const notification_index = user_document.notifications.findIndex((n: any) => { if (n.type !== NotificationType.LIKE) return false; if (new Date(n.created_at) < one_day_ago) return false; @@ -1486,10 +1871,11 @@ export class NotificationsService implements OnModuleInit { }); if (notification_index === -1) { - return false; + return null; } const notification = user_document.notifications[notification_index] as any; + const notification_id = notification._id ? notification._id.toString() : undefined; const tweet_id_array = Array.isArray(notification.tweet_id) ? notification.tweet_id : [notification.tweet_id]; @@ -1497,13 +1883,11 @@ export class NotificationsService implements OnModuleInit { ? notification.liked_by : [notification.liked_by]; - // Determine if this is aggregated by tweet or by person const is_single_tweet = tweet_id_array.length === 1; const is_single_person = liked_by_array.length === 1; let modified = false; if (is_single_tweet && is_single_person) { - // Not aggregated const result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1519,7 +1903,6 @@ export class NotificationsService implements OnModuleInit { ); modified = result.modifiedCount > 0; } else if (is_single_tweet) { - // Aggregated by tweet, remove the person const result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1542,7 +1925,6 @@ export class NotificationsService implements OnModuleInit { ); modified = result.modifiedCount > 0; } else if (is_single_person) { - // Aggregated by person, remove the tweet const result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1566,7 +1948,6 @@ export class NotificationsService implements OnModuleInit { modified = result.modifiedCount > 0; } - // Clean up notifications with empty arrays const cleanup_result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1580,7 +1961,7 @@ export class NotificationsService implements OnModuleInit { } ); - return modified || cleanup_result.modifiedCount > 0; + return modified || cleanup_result.modifiedCount > 0 ? notification_id : null; } catch (error) { console.error('Error removing like notification:', error); throw error; @@ -1591,21 +1972,18 @@ export class NotificationsService implements OnModuleInit { user_id: string, tweet_id: string, reposted_by: string - ): Promise { + ): Promise { try { - // Calculate the date 1 day ago const one_day_ago = new Date(); one_day_ago.setDate(one_day_ago.getDate() - 1); const now = new Date(); - // First, check for aggregated notifications const user_document = await this.notificationModel.findOne({ user: user_id }).lean(); if (!user_document || !user_document.notifications) { - return false; + return null; } - // Find the notification that contains the repost const notification_index = user_document.notifications.findIndex((n: any) => { if (n.type !== NotificationType.REPOST) return false; if (new Date(n.created_at) < one_day_ago) return false; @@ -1619,10 +1997,11 @@ export class NotificationsService implements OnModuleInit { }); if (notification_index === -1) { - return false; + return null; } const notification = user_document.notifications[notification_index] as any; + const notification_id = notification._id ? notification._id.toString() : undefined; const tweet_id_array = Array.isArray(notification.tweet_id) ? notification.tweet_id : [notification.tweet_id]; @@ -1630,13 +2009,11 @@ export class NotificationsService implements OnModuleInit { ? notification.reposted_by : [notification.reposted_by]; - // Determine if this is aggregated by tweet or by person const is_single_tweet = tweet_id_array.length === 1; const is_single_person = reposted_by_array.length === 1; let modified = false; if (is_single_tweet && is_single_person) { - // Not aggregated const result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1652,7 +2029,6 @@ export class NotificationsService implements OnModuleInit { ); modified = result.modifiedCount > 0; } else if (is_single_tweet) { - // Aggregated by tweet, remove the person const result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1675,7 +2051,6 @@ export class NotificationsService implements OnModuleInit { ); modified = result.modifiedCount > 0; } else if (is_single_person) { - // Aggregated by person, remove the tweet const result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1699,7 +2074,6 @@ export class NotificationsService implements OnModuleInit { modified = result.modifiedCount > 0; } - // Clean up notifications with empty arrays const cleanup_result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1713,7 +2087,7 @@ export class NotificationsService implements OnModuleInit { } ); - return modified || cleanup_result.modifiedCount > 0; + return modified || cleanup_result.modifiedCount > 0 ? notification_id : null; } catch (error) { console.error('Error removing repost notification:', error); throw error; @@ -1724,12 +2098,27 @@ export class NotificationsService implements OnModuleInit { user_id: string, reply_tweet_id: string, replied_by: string - ): Promise { + ): Promise { try { - // Calculate the date 1 day ago const one_day_ago = new Date(); one_day_ago.setDate(one_day_ago.getDate() - 1); + const user_document = await this.notificationModel.findOne({ user: user_id }).lean(); + if (!user_document || !user_document.notifications) return null; + + const notification_index = user_document.notifications.findIndex( + (n: any) => + n.type === NotificationType.REPLY && + n.reply_tweet_id === reply_tweet_id && + n.replied_by === replied_by && + new Date(n.created_at) >= one_day_ago + ); + + if (notification_index === -1) return null; + + const notification = user_document.notifications[notification_index] as any; + const notification_id = notification._id ? notification._id.toString() : null; + const result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1744,7 +2133,7 @@ export class NotificationsService implements OnModuleInit { } ); - return result.modifiedCount > 0; + return result.modifiedCount > 0 ? notification_id : null; } catch (error) { console.error('Error removing reply notification:', error); throw error; @@ -1755,12 +2144,27 @@ export class NotificationsService implements OnModuleInit { user_id: string, quote_tweet_id: string, quoted_by: string - ): Promise { + ): Promise { try { - // Calculate the date 1 day ago const one_day_ago = new Date(); one_day_ago.setDate(one_day_ago.getDate() - 1); + const user_document = await this.notificationModel.findOne({ user: user_id }).lean(); + if (!user_document || !user_document.notifications) return null; + + const notification_index = user_document.notifications.findIndex( + (n: any) => + n.type === NotificationType.QUOTE && + n.quote_tweet_id === quote_tweet_id && + n.quoted_by === quoted_by && + new Date(n.created_at) >= one_day_ago + ); + + if (notification_index === -1) return null; + + const notification = user_document.notifications[notification_index] as any; + const notification_id = notification._id ? notification._id.toString() : undefined; + const result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1775,7 +2179,7 @@ export class NotificationsService implements OnModuleInit { } ); - return result.modifiedCount > 0; + return result.modifiedCount > 0 ? notification_id : null; } catch (error) { console.error('Error removing quote notification:', error); throw error; @@ -1786,11 +2190,27 @@ export class NotificationsService implements OnModuleInit { user_id: string, tweet_id: string, mentioned_by: string - ): Promise { + ): Promise { try { const one_day_ago = new Date(); one_day_ago.setDate(one_day_ago.getDate() - 1); + const user_document = await this.notificationModel.findOne({ user: user_id }).lean(); + if (!user_document || !user_document.notifications) return null; + + const notification_index = user_document.notifications.findIndex( + (n: any) => + n.type === NotificationType.MENTION && + n.tweet_id === tweet_id && + n.mentioned_by === mentioned_by && + new Date(n.created_at) >= one_day_ago + ); + + if (notification_index === -1) return null; + + const notification = user_document.notifications[notification_index] as any; + const notification_id = notification._id ? notification._id.toString() : undefined; + const result = await this.notificationModel.updateOne( { user: user_id }, { @@ -1805,7 +2225,7 @@ export class NotificationsService implements OnModuleInit { } ); - return result.modifiedCount > 0; + return result.modifiedCount > 0 ? notification_id : null; } catch (error) { console.error('Error removing mention notification:', error); throw error; @@ -1821,7 +2241,6 @@ export class NotificationsService implements OnModuleInit { const user_ids = new Set(); const tweet_ids = new Set(); - // Collect user IDs and tweet IDs based on notification type switch (notification.type) { case NotificationType.FOLLOW: { const follow_notification = notification as FollowNotificationEntity; @@ -1873,6 +2292,10 @@ export class NotificationsService implements OnModuleInit { } // Fetch all data in parallel + const should_fetch_tweet_user = + notification.type !== NotificationType.LIKE && + notification.type !== NotificationType.REPOST; + const [users, tweets] = await Promise.all([ user_ids.size > 0 ? this.user_repository.find({ @@ -1883,6 +2306,7 @@ export class NotificationsService implements OnModuleInit { tweet_ids.size > 0 ? this.tweet_repository.find({ where: { tweet_id: In(Array.from(tweet_ids)) }, + relations: should_fetch_tweet_user ? ['user'] : [], }) : [], ]); @@ -1897,7 +2321,6 @@ export class NotificationsService implements OnModuleInit { const missing_tweet_ids = new Set(); const missing_user_ids = new Set(); - // Build the notification DTO based on type switch (notification.type) { case NotificationType.FOLLOW: { const follow_notification = notification as FollowNotificationEntity; @@ -1911,9 +2334,9 @@ export class NotificationsService implements OnModuleInit { if (!user) { missing_user_ids.add(id); } - return user; + return user ? this.cleanUser(user) : undefined; }) - .filter((user): user is User => user !== undefined); + .filter((user) => user !== undefined); // Clean up missing user IDs if any if (missing_user_ids.size > 0) { @@ -1924,6 +2347,7 @@ export class NotificationsService implements OnModuleInit { } return { + id: notification._id ? notification._id.toString() : null, type: notification.type, created_at: notification.created_at, followers, @@ -1941,9 +2365,9 @@ export class NotificationsService implements OnModuleInit { if (!tweet) { missing_tweet_ids.add(id); } - return tweet; + return tweet ? this.cleanTweet(tweet) : undefined; }) - .filter((tweet): tweet is Tweet => tweet !== undefined); + .filter((tweet) => tweet !== undefined); const liked_by_ids = Array.isArray(like_notification.liked_by) ? like_notification.liked_by @@ -1955,9 +2379,9 @@ export class NotificationsService implements OnModuleInit { if (!user) { missing_user_ids.add(id); } - return user; + return user ? this.cleanUser(user) : undefined; }) - .filter((user): user is User => user !== undefined); + .filter((user) => user !== undefined); // Clean up missing tweet IDs if any if (missing_tweet_ids.size > 0) { @@ -1976,6 +2400,7 @@ export class NotificationsService implements OnModuleInit { } return { + id: notification._id ? notification._id.toString() : null, type: notification.type, created_at: notification.created_at, likers, @@ -1994,9 +2419,9 @@ export class NotificationsService implements OnModuleInit { if (!tweet) { missing_tweet_ids.add(id); } - return tweet; + return tweet ? this.cleanTweet(tweet) : undefined; }) - .filter((tweet): tweet is Tweet => tweet !== undefined); + .filter((tweet) => tweet !== undefined); const reposted_by_ids = Array.isArray(repost_notification.reposted_by) ? repost_notification.reposted_by @@ -2008,9 +2433,9 @@ export class NotificationsService implements OnModuleInit { if (!user) { missing_user_ids.add(id); } - return user; + return user ? this.cleanUser(user) : undefined; }) - .filter((user): user is User => user !== undefined); + .filter((user) => user !== undefined); // Clean up missing tweet IDs if any if (missing_tweet_ids.size > 0) { @@ -2029,6 +2454,7 @@ export class NotificationsService implements OnModuleInit { } return { + id: notification._id ? notification._id.toString() : null, type: notification.type, created_at: notification.created_at, reposters, diff --git a/src/redis/redis.service.ts b/src/redis/redis.service.ts index 39c884ce..43bed40f 100644 --- a/src/redis/redis.service.ts +++ b/src/redis/redis.service.ts @@ -131,4 +131,19 @@ export class RedisService { pipeline() { return this.redis_client.pipeline(); } + + async keys(pattern: string): Promise { + return this.redis_client.keys(pattern); + } + + async zrem(key: string, ...members: string[]): Promise { + return this.redis_client.zrem(key, ...members); + } + + async deleteByPrefix(prefix: string): Promise { + const keys = await this.redis_client.keys(`${prefix}*`); + if (keys.length > 0) { + await this.redis_client.del(...keys); + } + } } diff --git a/src/search/dto/search-query.dto.ts b/src/search/dto/search-query.dto.ts index d2042c3d..5807d2c0 100644 --- a/src/search/dto/search-query.dto.ts +++ b/src/search/dto/search-query.dto.ts @@ -1,18 +1,8 @@ import { ApiPropertyOptional } from '@nestjs/swagger'; -import { IsEnum, IsInt, IsOptional, IsString, Max, Min } from 'class-validator'; +import { IsInt, IsOptional, IsString, Max, Min } from 'class-validator'; import { BasicQueryDto } from './basic-query.dto'; import { Type } from 'class-transformer'; -export enum PeopleFilter { - ANYONE = 'anyone', - FOLLOWING = 'following', -} - -export enum LocationFilter { - ANYWHERE = 'anywhere', - NEAR_YOU = 'near_you', -} - export class SearchQueryDto extends BasicQueryDto { @ApiPropertyOptional({ description: 'Cursor for pagination (format: "timestamp_userId")', diff --git a/src/search/search.controller.spec.ts b/src/search/search.controller.spec.ts index 65c43d52..39bd1e91 100644 --- a/src/search/search.controller.spec.ts +++ b/src/search/search.controller.spec.ts @@ -20,6 +20,7 @@ describe('SearchController', () => { searchUsers: jest.fn(), searchPosts: jest.fn(), searchLatestPosts: jest.fn(), + getMentionSuggestions: jest.fn(), }; const module: TestingModule = await Test.createTestingModule({ @@ -75,13 +76,13 @@ describe('SearchController', () => { const get_suggestions = jest .spyOn(search_service, 'getSuggestions') - .mockResolvedValueOnce(undefined); + .mockResolvedValueOnce(mock_response); const result = await controller.getSuggestions(current_user_id, query_dto); expect(get_suggestions).toHaveBeenCalledWith(current_user_id, query_dto); expect(get_suggestions).toHaveBeenCalledTimes(1); - expect(result).toEqual(undefined); + expect(result).toEqual(mock_response); }); }); @@ -162,6 +163,7 @@ describe('SearchController', () => { 'https://example.com/image2.jpg', ], videos: ['https://example.com/video1.mp4'], + mentions: [], user: { id: '323926cd-4fdb-4880-85f5-a31aa983bc79', username: 'alyaa2242', @@ -182,6 +184,7 @@ describe('SearchController', () => { 'https://example.com/image2.jpg', ], videos: ['https://example.com/video1.mp4'], + mentions: ['blah'], user: { id: '323926cd-4fdb-4880-85f5-a31aa983bc79', username: 'alyaa2242', @@ -244,6 +247,7 @@ describe('SearchController', () => { 'https://example.com/image2.jpg', ], videos: ['https://example.com/video1.mp4'], + mentions: [], user: { id: '323926cd-4fdb-4880-85f5-a31aa983bc79', username: 'alyaa2242', @@ -264,6 +268,7 @@ describe('SearchController', () => { 'https://example.com/image2.jpg', ], videos: ['https://example.com/video1.mp4'], + mentions: ['blah'], user: { id: '323926cd-4fdb-4880-85f5-a31aa983bc79', username: 'alyaa2242', @@ -312,4 +317,42 @@ describe('SearchController', () => { expect(result).toEqual(mock_response); }); }); + + describe('getMentionSuggestions', () => { + it('should call search_service.getMentionSuggestions with the current user id and query dto', async () => { + const mock_response = [ + { + user_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + name: 'Alyaa Ali', + username: 'Alyaali242', + avatar_url: 'https://cdn.app.com/profiles/u877.jpg', + is_following: true, + is_follower: false, + }, + { + user_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + name: 'Alia Mohamed', + username: 'alyaa#222', + avatar_url: 'https://cdn.app.com/profiles/u877.jpg', + is_following: false, + is_follower: false, + }, + ]; + + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: BasicQueryDto = { + query: 'aly', + }; + + const get_suggestions = jest + .spyOn(search_service, 'getMentionSuggestions') + .mockResolvedValueOnce(mock_response); + + const result = await controller.getMentionSuggestions(current_user_id, query_dto); + + expect(get_suggestions).toHaveBeenCalledWith(current_user_id, query_dto); + expect(get_suggestions).toHaveBeenCalledTimes(1); + expect(result).toEqual(mock_response); + }); + }); }); diff --git a/src/search/search.controller.ts b/src/search/search.controller.ts index 1e6a00d2..9d93414e 100644 --- a/src/search/search.controller.ts +++ b/src/search/search.controller.ts @@ -4,6 +4,7 @@ import { ApiOkResponse, ApiOperation, ApiTags } from '@nestjs/swagger'; import { ApiBadRequestErrorResponse } from 'src/decorators/swagger-error-responses.decorator'; import { ResponseMessage } from 'src/decorators/response-message.decorator'; import { + get_mention_suggestions_swagger, get_suggestions_swagger, search_latest_posts, search_users_swagger, @@ -62,4 +63,17 @@ export class SearchController { ) { return await this.search_service.searchLatestPosts(current_user_id, query_dto); } + + @UseGuards(JwtAuthGuard) + @ApiOperation(get_mention_suggestions_swagger.operation) + @ApiOkResponse(get_mention_suggestions_swagger.responses.success) + @ApiBadRequestErrorResponse(ERROR_MESSAGES.INVALID_SEARCH_QUERY) + @ResponseMessage(SUCCESS_MESSAGES.SUGGESTIONS_RETRIEVED) + @Get('mention-suggestions') + async getMentionSuggestions( + @GetUserId() current_user_id: string, + @Query() query_dto: BasicQueryDto + ) { + return await this.search_service.getMentionSuggestions(current_user_id, query_dto); + } } diff --git a/src/search/search.service.spec.ts b/src/search/search.service.spec.ts index 142a14a9..c2f234e2 100644 --- a/src/search/search.service.spec.ts +++ b/src/search/search.service.spec.ts @@ -6,13 +6,16 @@ import { SearchQueryDto } from './dto/search-query.dto'; import { PostsSearchDto } from './dto/post-search.dto'; import { ELASTICSEARCH_INDICES } from 'src/elasticsearch/schemas'; import { DataSource } from 'typeorm'; -import { mock } from 'node:test'; +import { RedisService } from 'src/redis/redis.service'; +import { TweetType } from 'src/shared/enums/tweet-types.enum'; +import { BasicQueryDto } from './dto/basic-query.dto'; describe('SearchService', () => { let service: SearchService; let elasticsearch_service: jest.Mocked; let user_repository: jest.Mocked; let data_source: jest.Mocked; + let redis_service: jest.Mocked; beforeEach(async () => { const mock_elasticsearch_service = { @@ -41,12 +44,17 @@ describe('SearchService', () => { query: jest.fn(), }; + const mock_redis_service = { + zrevrange: jest.fn(), + }; + const module: TestingModule = await Test.createTestingModule({ providers: [ SearchService, { provide: ElasticsearchService, useValue: mock_elasticsearch_service }, { provide: UserRepository, useValue: mock_user_repository }, { provide: DataSource, useValue: mock_data_source }, + { provide: RedisService, useValue: mock_redis_service }, ], }).compile(); @@ -54,6 +62,7 @@ describe('SearchService', () => { elasticsearch_service = module.get(ElasticsearchService); user_repository = module.get(UserRepository); data_source = module.get(DataSource); + redis_service = module.get(RedisService); }); afterEach(() => jest.clearAllMocks()); @@ -96,12 +105,14 @@ describe('SearchService', () => { }, ]); + redis_service.zrevrange.mockResolvedValueOnce([]); + elasticsearch_service.search.mockResolvedValueOnce({ hits: { hits: [ { - _source: { content: 'Check out technology' }, - highlight: { content: ['technology'] }, + _source: { content: 'technology is fun' }, + highlight: { content: ['technology is fun'] }, }, ], }, @@ -120,37 +131,29 @@ describe('SearchService', () => { }); expect(result.suggested_queries).toHaveLength(1); expect(result.suggested_queries[0]).toEqual({ - query: 'technology', + query: 'technology is fun', is_trending: false, }); }); - it('should return suggestions with users and queries with hashtag query', async () => { + it('should handle hashtag queries', async () => { const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; const query_dto = { query: '#tech' }; const mock_query_builder = user_repository.createQueryBuilder() as any; - mock_query_builder.getRawMany.mockResolvedValueOnce([ - { - user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', - username: 'alyaa242', - name: 'Alyaa Ali', - bio: 'Blah', - avatar_url: 'https://example.com/avatar.jpg', - verified: true, - followers: 100, - following: 50, - is_following: false, - is_follower: false, - }, - ]); + mock_query_builder.getRawMany.mockResolvedValueOnce([]); + + redis_service.zrevrange.mockResolvedValueOnce(['#technology', '150']); elasticsearch_service.search.mockResolvedValueOnce({ hits: { hits: [ { - _source: { content: 'Check out #technology' }, - highlight: { content: ['#technology'] }, + _source: { + content: 'Check out #technology', + hashtags: ['#technology'], + }, + highlight: { content: ['Check out #technology'] }, }, ], }, @@ -158,70 +161,123 @@ describe('SearchService', () => { const result = await service.getSuggestions(current_user_id, query_dto); - expect(result.suggested_users).toHaveLength(1); - expect(result.suggested_users[0]).toEqual({ - user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', - username: 'alyaa242', - name: 'Alyaa Ali', - avatar_url: 'https://example.com/avatar.jpg', - is_following: false, - is_follower: false, - }); expect(result.suggested_queries).toHaveLength(1); expect(result.suggested_queries[0]).toEqual({ query: '#technology', - is_trending: false, + is_trending: true, }); }); - it('should return suggestions with users and queries with normal query with hashtag result', async () => { + it('should sanitize special characters from query', async () => { const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const query_dto = { query: 'tech' }; + const query_dto = { query: 'tech!' }; const mock_query_builder = user_repository.createQueryBuilder() as any; - mock_query_builder.getRawMany.mockResolvedValueOnce([ - { - user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', - username: 'alyaa242', - name: 'Alyaa Ali', - bio: 'Blah', - avatar_url: 'https://example.com/avatar.jpg', - verified: true, - followers: 100, - following: 50, - is_following: false, - is_follower: false, - }, - ]); + mock_query_builder.getRawMany.mockResolvedValueOnce([]); + redis_service.zrevrange.mockResolvedValueOnce([]); elasticsearch_service.search.mockResolvedValueOnce({ - hits: { - hits: [ - { - _source: { content: 'Check out #technology' }, - highlight: { content: ['#technology'] }, - }, - ], - }, + hits: { hits: [] }, } as any); const result = await service.getSuggestions(current_user_id, query_dto); - expect(result.suggested_users).toHaveLength(1); - expect(result.suggested_users[0]).toEqual({ - user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', - username: 'alyaa242', - name: 'Alyaa Ali', - avatar_url: 'https://example.com/avatar.jpg', - is_following: false, - is_follower: false, - }); - expect(result.suggested_queries).toHaveLength(1); - expect(result.suggested_queries[0]).toEqual({ - query: '#technology', - is_trending: false, + expect(result).toEqual({ + suggested_queries: [], + suggested_users: [], }); }); + it('should fetch and normalize trending hashtags', async () => { + const mock_redis_result = [ + '#technology', + '150.5', + 'javascript', + '120.3', + '#ai', + '100.0', + ]; + + redis_service.zrevrange.mockResolvedValueOnce(mock_redis_result); + + const result = await (service as any).getTrendingHashtags(); + + expect(result.size).toBe(3); + expect(result.get('#technology')).toBe(150.5); + expect(result.get('#javascript')).toBe(120.3); + expect(result.get('#ai')).toBe(100.0); + }); + + it('should return empty map when redis returns empty result', async () => { + redis_service.zrevrange.mockResolvedValueOnce([]); + + const result = await (service as any).getTrendingHashtags(); + + expect(result.size).toBe(0); + expect(result instanceof Map).toBe(true); + }); + + it('should return empty map when redis returns null', async () => { + redis_service.zrevrange.mockResolvedValueOnce(null as any); + + const result = await (service as any).getTrendingHashtags(); + + expect(result.size).toBe(0); + expect(result instanceof Map).toBe(true); + }); + + it('should return empty map when error occurs', async () => { + redis_service.zrevrange.mockRejectedValueOnce(new Error('Redis connection failed')); + + const console_spy = jest.spyOn(console, 'error').mockImplementation(); + + const result = await (service as any).getTrendingHashtags(); + + expect(result.size).toBe(0); + expect(result instanceof Map).toBe(true); + expect(console_spy).toHaveBeenCalledWith( + 'Error fetching trending hashtags:', + expect.any(Error) + ); + + console_spy.mockRestore(); + }); + + it('should normalize hashtags without # prefix', async () => { + const mock_redis_result = ['nodejs', '90.0', 'react', '85.5']; + + redis_service.zrevrange.mockResolvedValueOnce(mock_redis_result); + + const result = await (service as any).getTrendingHashtags(); + + expect(result.get('#nodejs')).toBe(90.0); + expect(result.get('#react')).toBe(85.5); + }); + + it('should handle errors in parallel execution', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + const query_dto: BasicQueryDto = { + query: 'test', + }; + + const mock_query_builder = { + setParameters: jest.fn().mockReturnThis(), + }; + + user_repository.createQueryBuilder.mockReturnValueOnce(mock_query_builder as any); + + jest.spyOn(service as any, 'attachUserSearchQuery').mockReturnValueOnce( + mock_query_builder + ); + jest.spyOn(service as any, 'executeUsersSearch').mockRejectedValueOnce( + new Error('DB error') + ); + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce(new Map()); + + elasticsearch_service.search.mockResolvedValueOnce({ hits: { hits: [] } } as any); + + await expect(service.getSuggestions(current_user_id, query_dto)).rejects.toThrow(); + }); }); describe('searchUsers', () => { @@ -362,149 +418,678 @@ describe('SearchService', () => { 'Invalid cursor' ); }); - }); - describe('searchPosts', () => { - it('should return empty result when query is empty', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const query_dto: PostsSearchDto = { - query: '', - limit: 20, - }; + describe('username filter', () => { + it('should apply username filter when username is provided', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + username: 'john_doe', + }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: true, + followers: 100, + following: 50, + is_following: false, + is_follower: false, + total_score: 150.5, + }, + ]); - const result = await service.searchPosts(current_user_id, query_dto); + const result = await service.searchUsers(current_user_id, query_dto); - expect(result).toEqual({ - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, + expect(mock_query_builder.andWhere).toHaveBeenCalled(); + expect(mock_query_builder.setParameters).toHaveBeenCalledWith( + expect.objectContaining({ + username: 'john_doe', + }) + ); + expect(result.data).toHaveLength(1); }); - expect(elasticsearch_service.search).not.toHaveBeenCalled(); - }); - it('should search posts and return results without related tweets', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const query_dto: PostsSearchDto = { - query: 'technology', - limit: 20, - }; + it('should not apply username filter when username is not provided', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + }; - const mock_tweet = { - tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - type: 'post', - content: 'This is a post about technology', - created_at: '2024-01-15T10:30:00Z', - updated_at: '2024-01-15T10:30:00Z', - num_likes: 10, - num_reposts: 5, - num_views: 100, - num_replies: 3, - num_quotes: 2, - author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', - username: 'alyaa242', - name: 'Alyaa Ali', - avatar_url: 'https://example.com/avatar.jpg', - followers: 100, - following: 50, - images: [], - videos: [], - }; + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: true, + followers: 100, + following: 50, + is_following: false, + is_follower: false, + total_score: 150.5, + }, + ]); - const mock_elasticsearch_response = { - hits: { - hits: [ - { - _source: mock_tweet, - sort: [ - 2.5, - '2024-01-15T10:30:00Z', - 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - ], - }, - ], - }, - }; + const result = await service.searchUsers(current_user_id, query_dto); - elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); - elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + expect(mock_query_builder.setParameters).toHaveBeenCalledWith( + expect.objectContaining({ + username: undefined, + }) + ); + expect(result.data).toHaveLength(1); + }); - jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + it('should filter users who follow or are followed by target username', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + username: 'target_user', + }; - jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ - { - ...mock_tweet, - has_liked: false, - has_reposted: false, - has_bookmarked: false, - }, - ]); + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: true, + followers: 100, + following: 50, + is_following: true, + is_follower: false, + total_score: 150.5, + }, + ]); - const result = await service.searchPosts(current_user_id, query_dto); + const result = await service.searchUsers(current_user_id, query_dto); - expect(elasticsearch_service.search).toHaveBeenCalledWith({ - index: ELASTICSEARCH_INDICES.TWEETS, - body: expect.objectContaining({ - query: { - bool: { - must: [], - should: expect.any(Array), - minimum_should_match: 1, - }, - }, - size: 21, - sort: [ - { _score: { order: 'desc' } }, - { created_at: { order: 'desc' } }, - { tweet_id: { order: 'desc' } }, - ], - }), + expect(result.data).toHaveLength(1); + expect(result.data[0].username).toBe('alyaa242'); }); - expect(result.data).toHaveLength(1); - expect(result.data[0].tweet_id).toBe('a1b2c3d4-e5f6-7890-abcd-ef1234567890'); - expect(result.data[0].content).toBe('This is a post about technology'); - expect(result.pagination.has_more).toBe(false); - expect(result.pagination.next_cursor).toBe(null); + it('should return empty results when no users match username filter', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + username: 'nonexistent_user', + }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([]); + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(result.data).toHaveLength(0); + expect(result.pagination.has_more).toBe(false); + expect(result.pagination.next_cursor).toBeNull(); + }); }); - it('should search posts with media filter', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const query_dto: PostsSearchDto = { - query: 'technology', - limit: 20, - has_media: true, - }; + describe('cursor pagination', () => { + it('should apply cursor pagination with valid cursor', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const cursor = Buffer.from( + JSON.stringify({ + score: 150.5, + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + }) + ).toString('base64'); - const mock_tweet = { - tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - type: 'post', - content: 'This is a post with images', - created_at: '2024-01-15T10:30:00Z', - updated_at: '2024-01-15T10:30:00Z', - num_likes: 10, - num_reposts: 5, - num_views: 100, - num_replies: 3, - num_quotes: 2, - author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', - username: 'alyaa242', - name: 'Alyaa Ali', - avatar_url: 'https://example.com/avatar.jpg', - followers: 100, - following: 50, - images: ['https://example.com/image1.jpg'], - videos: [], - }; + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + cursor, + }; - const mock_elasticsearch_response = { - hits: { - hits: [ - { - _source: mock_tweet, - sort: [ - 2.5, + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + username: 'alyaa_next', + name: 'Alyaa Next', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: true, + followers: 90, + following: 40, + is_following: false, + is_follower: false, + total_score: 140.0, + }, + ]); + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(mock_query_builder.andWhere).toHaveBeenCalled(); + expect(result.data).toHaveLength(1); + expect(result.data[0].username).toBe('alyaa_next'); + }); + + it('should handle cursor with score less than condition', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const cursor = Buffer.from( + JSON.stringify({ + score: 200.0, + user_id: 'first-user-id', + }) + ).toString('base64'); + + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + cursor, + }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '3c0g1128-87dd-6gc6-c836-gee8c556gg16', + username: 'lower_score_user', + name: 'Lower Score', + bio: 'Developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: false, + followers: 50, + following: 30, + is_following: false, + is_follower: false, + is_blocked: false, + is_muted: false, + total_score: 150.0, + }, + ]); + + const user = { + user_id: '3c0g1128-87dd-6gc6-c836-gee8c556gg16', + username: 'lower_score_user', + name: 'Lower Score', + bio: 'Developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: false, + followers: 50, + following: 30, + is_following: false, + is_follower: false, + is_blocked: false, + is_muted: false, + }; + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(result.data).toHaveLength(1); + expect(result.data[0]).toEqual(user); + }); + + it('should handle cursor with equal score and greater ID condition', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const cursor = Buffer.from( + JSON.stringify({ + score: 150.5, + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + }) + ).toString('base64'); + + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + cursor, + }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee95', + username: 'same_score_user', + name: 'Same Score User', + bio: 'Developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: false, + followers: 100, + following: 50, + is_following: false, + is_follower: false, + total_score: 150.5, + }, + ]); + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(result.data).toHaveLength(1); + }); + + it('should throw error when cursor is malformed JSON', async () => { + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + cursor: Buffer.from('not-valid-json').toString('base64'), + }; + + await expect(service.searchUsers('user-id', query_dto)).rejects.toThrow( + 'Invalid cursor' + ); + }); + + it('should throw error when cursor is not base64', async () => { + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + cursor: 'not-base64-string!!!', + }; + + await expect(service.searchUsers('user-id', query_dto)).rejects.toThrow( + 'Invalid cursor' + ); + }); + + it('should throw error when cursor has missing fields', async () => { + const cursor = Buffer.from( + JSON.stringify({ + score: 150.5, + }) + ).toString('base64'); + + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + cursor, + }; + + await expect(service.searchUsers('user-id', query_dto)).rejects.toThrow(); + }); + + it('should not apply cursor pagination when cursor is null', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + cursor: null, + }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: true, + followers: 100, + following: 50, + is_following: false, + is_follower: false, + total_score: 150.5, + }, + ]); + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(result.data).toHaveLength(1); + }); + + it('should set limit to 20 when not passed', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { + query: 'alyaa', + cursor: null, + }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: true, + followers: 100, + following: 50, + is_following: false, + is_follower: false, + total_score: 150.5, + }, + ]); + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(mock_query_builder.limit).toHaveBeenCalledWith(21); + }); + }); + + describe('attachUserSearchQuery coverage', () => { + it('should add is_following and is_follower joins', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { query: 'alyaa', limit: 20 }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: true, + followers: 100, + following: 50, + is_following: true, + is_follower: true, + total_score: 1000150.5, + }, + ]); + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(mock_query_builder.leftJoin).toHaveBeenCalled(); + expect(mock_query_builder.addSelect).toHaveBeenCalled(); + expect(result.data[0].is_following).toBe(true); + expect(result.data[0].is_follower).toBe(true); + }); + + it('should exclude blocked users', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { query: 'alyaa', limit: 20 }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: true, + followers: 100, + following: 50, + is_following: false, + is_follower: false, + total_score: 150.5, + }, + ]); + + await service.searchUsers(current_user_id, query_dto); + + expect(mock_query_builder.andWhere).toHaveBeenCalledWith( + expect.stringContaining('user_blocks') + ); + }); + + it('should apply search_vector query with prefix', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { query: 'alyaa', limit: 20 }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([]); + + await service.searchUsers(current_user_id, query_dto); + + expect(mock_query_builder.where).toHaveBeenCalled(); + expect(mock_query_builder.setParameters).toHaveBeenCalledWith( + expect.objectContaining({ + prefix_query: expect.any(String), + }) + ); + }); + + it('should calculate total_score with boost for followed users', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { query: 'alyaa', limit: 20 }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'followed_user', + name: 'Followed User', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: true, + followers: 100, + following: 50, + is_following: true, + is_follower: false, + total_score: 1000150.5, + }, + ]); + + const user = { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'followed_user', + name: 'Followed User', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: true, + followers: 100, + following: 50, + is_following: true, + is_follower: false, + }; + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(result.data[0]).toEqual(user); + }); + + it('should include all user fields in select', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { query: 'alyaa', limit: 20 }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + bio: 'Software developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/cover.jpg', + verified: true, + followers: 100, + following: 50, + is_following: false, + is_follower: false, + total_score: 150.5, + }, + ]); + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(mock_query_builder.select).toHaveBeenCalled(); + expect(result.data[0]).toHaveProperty('user_id'); + expect(result.data[0]).toHaveProperty('username'); + expect(result.data[0]).toHaveProperty('name'); + expect(result.data[0]).toHaveProperty('bio'); + expect(result.data[0]).toHaveProperty('avatar_url'); + expect(result.data[0]).toHaveProperty('cover_url'); + expect(result.data[0]).toHaveProperty('verified'); + expect(result.data[0]).toHaveProperty('followers'); + expect(result.data[0]).toHaveProperty('following'); + }); + }); + + describe('combined filters', () => { + it('should apply both username filter and cursor pagination', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const cursor = Buffer.from( + JSON.stringify({ + score: 150.5, + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + }) + ).toString('base64'); + + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 20, + cursor, + username: 'target_user', + }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + username: 'filtered_user', + name: 'Filtered User', + bio: 'Developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: false, + followers: 80, + following: 40, + is_following: true, + is_follower: false, + is_blocked: false, + is_muted: false, + total_score: 140.0, + }, + ]); + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(mock_query_builder.andWhere).toHaveBeenCalledTimes(3); + expect(mock_query_builder.setParameters).toHaveBeenCalledWith( + expect.objectContaining({ + username: 'target_user', + }) + ); + expect(result.data).toHaveLength(1); + }); + + it('should handle username filter with pagination and multiple results', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { + query: 'alyaa', + limit: 1, + username: 'target_user', + }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'user1', + name: 'User One', + bio: 'Developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: false, + followers: 100, + following: 50, + is_following: true, + is_follower: false, + total_score: 160.0, + }, + { + user_id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + username: 'user2', + name: 'User Two', + bio: 'Developer', + avatar_url: 'https://example.com/avatar.jpg', + cover_url: 'https://example.com/avatar.jpg', + verified: false, + followers: 80, + following: 40, + is_following: false, + is_follower: true, + total_score: 150.0, + }, + ]); + + const result = await service.searchUsers(current_user_id, query_dto); + + expect(result.data).toHaveLength(1); + expect(result.pagination.has_more).toBe(true); + expect(result.pagination.next_cursor).toBeTruthy(); + }); + }); + }); + describe('searchPosts', () => { + it('should return empty result when query is empty', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: '', + limit: 20, + }; + + const result = await service.searchPosts(current_user_id, query_dto); + + expect(result).toEqual({ + data: [], + pagination: { + next_cursor: null, + has_more: false, + }, + }); + expect(elasticsearch_service.search).not.toHaveBeenCalled(); + }); + + it('should search posts with media filter', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: 'technology', + limit: 20, + has_media: true, + }; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: 'tweet', + content: 'This is a post with images', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 10, + num_reposts: 5, + num_views: 100, + num_replies: 3, + num_quotes: 2, + author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/avatar.jpg', + followers: 100, + following: 50, + images: ['https://example.com/image1.jpg'], + videos: [], + }; + + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [ + 2.5, '2024-01-15T10:30:00Z', 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', ], @@ -517,13 +1102,12 @@ describe('SearchService', () => { elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); - jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ { ...mock_tweet, - has_liked: false, - has_reposted: false, - has_bookmarked: false, + is_liked: false, + is_reposted: false, + is_bookmarked: false, }, ]); @@ -532,22 +1116,37 @@ describe('SearchService', () => { expect(elasticsearch_service.search).toHaveBeenCalledWith({ index: ELASTICSEARCH_INDICES.TWEETS, body: expect.objectContaining({ - query: { - bool: { - must: [], - should: expect.any(Array), - minimum_should_match: 1, - filter: [ - { - script: { - script: { - source: "(doc['images'].size() > 0 || doc['videos'].size() > 0)", + query: expect.objectContaining({ + function_score: expect.objectContaining({ + query: expect.objectContaining({ + bool: expect.objectContaining({ + should: expect.arrayContaining([ + expect.objectContaining({ + multi_match: expect.objectContaining({ + query: 'technology', + }), + }), + ]), + filter: expect.arrayContaining([ + { + script: { + script: { + source: "(doc['images'].size() > 0 || doc['videos'].size() > 0)", + }, + }, }, - }, - }, - ], - }, - }, + ]), + }), + }), + functions: expect.arrayContaining([ + expect.objectContaining({ + field_value_factor: expect.objectContaining({ + field: 'num_likes', + }), + }), + ]), + }), + }), }), }); @@ -564,7 +1163,7 @@ describe('SearchService', () => { const mock_tweet = { tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - type: 'post', + type: 'tweet', content: 'This is a post with images', created_at: '2024-01-15T10:30:00Z', updated_at: '2024-01-15T10:30:00Z', @@ -602,11 +1201,10 @@ describe('SearchService', () => { elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); - jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ { tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - type: 'post', + type: 'tweet', content: 'This is a post with images', created_at: '2024-01-15T10:30:00Z', updated_at: '2024-01-15T10:30:00Z', @@ -627,9 +1225,9 @@ describe('SearchService', () => { is_follower: false, is_following: false, }, - has_liked: false, - has_reposted: false, - has_bookmarked: false, + is_liked: false, + is_reposted: false, + is_bookmarked: false, }, ]); @@ -638,20 +1236,35 @@ describe('SearchService', () => { expect(elasticsearch_service.search).toHaveBeenCalledWith({ index: ELASTICSEARCH_INDICES.TWEETS, body: expect.objectContaining({ - query: { - bool: { - must: [], - should: expect.any(Array), - minimum_should_match: 1, - filter: [ - { - term: { - username: 'alyaa242', - }, - }, - ], - }, - }, + query: expect.objectContaining({ + function_score: expect.objectContaining({ + query: expect.objectContaining({ + bool: expect.objectContaining({ + should: expect.arrayContaining([ + expect.objectContaining({ + multi_match: expect.objectContaining({ + query: 'technology', + }), + }), + ]), + filter: expect.arrayContaining([ + { + term: { + username: 'alyaa242', + }, + }, + ]), + }), + }), + functions: expect.arrayContaining([ + expect.objectContaining({ + field_value_factor: expect.objectContaining({ + field: 'num_likes', + }), + }), + ]), + }), + }), }), }); @@ -659,7 +1272,7 @@ describe('SearchService', () => { expect(result.data[0].user.username).toBe('alyaa242'); }); - it('should search posts with hashtag query', async () => { + it('should search posts with hashtag query and apply trending hashtag boost', async () => { const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; const query_dto: PostsSearchDto = { query: '#technology', @@ -668,7 +1281,7 @@ describe('SearchService', () => { const mock_tweet = { tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - type: 'post', + type: 'tweet', content: 'Post with #technology', created_at: '2024-01-15T10:30:00Z', updated_at: '2024-01-15T10:30:00Z', @@ -703,17 +1316,141 @@ describe('SearchService', () => { }, }; + const trending_hashtags = new Map([ + ['#technology', 150], + ['#ai', 100], + ]); + + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce( + trending_hashtags + ); elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + { + ...mock_tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + }, + ]); + + const result = await service.searchPosts(current_user_id, query_dto); + + expect(elasticsearch_service.search).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + body: expect.objectContaining({ + query: expect.objectContaining({ + function_score: expect.objectContaining({ + query: expect.objectContaining({ + bool: expect.objectContaining({ + must: expect.arrayContaining([ + { + term: { + hashtags: { + value: '#technology', + boost: 10, + }, + }, + }, + ]), + }), + }), + functions: expect.arrayContaining([ + expect.objectContaining({ + field_value_factor: expect.objectContaining({ + field: 'num_likes', + }), + }), + expect.objectContaining({ + field_value_factor: expect.objectContaining({ + field: 'num_reposts', + }), + }), + expect.objectContaining({ + filter: expect.objectContaining({ + term: { + hashtags: { value: '#technology' }, + }, + }), + weight: expect.any(Number), + }), + ]), + boost_mode: 'sum', + score_mode: 'sum', + }), + }), + }), + }); + + expect(result.data).toHaveLength(1); + expect(result.data[0].content).toContain('#technology'); + }); + + it('should search posts with both hashtag and text query with trending boost', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: '#technology AI innovation', + limit: 20, + }; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: 'tweet', + content: 'Post about AI innovation with #technology', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 15, + num_reposts: 8, + num_views: 200, + num_replies: 5, + num_quotes: 3, + author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/avatar.jpg', + followers: 100, + following: 50, + hashtags: ['#technology'], + images: [], + videos: [], + }; + + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [ + 3.5, + '2024-01-15T10:30:00Z', + 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + ], + }, + ], + }, + }; + + const trending_hashtags = new Map([ + ['#technology', 200], + ['#ai', 150], + ]); + + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce( + trending_hashtags + ); + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ { ...mock_tweet, - has_liked: false, - has_reposted: false, - has_bookmarked: false, + is_liked: false, + is_reposted: false, + is_bookmarked: false, }, ]); @@ -722,27 +1459,289 @@ describe('SearchService', () => { expect(elasticsearch_service.search).toHaveBeenCalledWith({ index: ELASTICSEARCH_INDICES.TWEETS, body: expect.objectContaining({ - query: { - bool: { - must: [ - { - term: { - hashtags: { - value: '#technology', - boost: 10, + query: expect.objectContaining({ + function_score: expect.objectContaining({ + query: expect.objectContaining({ + bool: expect.objectContaining({ + must: expect.arrayContaining([ + { + term: { + hashtags: { + value: '#technology', + boost: 10, + }, + }, }, - }, - }, + ]), + should: expect.arrayContaining([ + expect.objectContaining({ + multi_match: expect.objectContaining({ + query: expect.stringContaining('AI'), + fields: expect.arrayContaining([ + 'content^3', + 'username^2', + 'name', + ]), + }), + }), + ]), + minimum_should_match: 1, + }), + }), + functions: expect.any(Array), + }), + }), + }), + }); + + expect(result.data).toHaveLength(1); + }); + + it('should apply boosting with empty trending hashtags map', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: 'technology', + limit: 20, + }; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: 'tweet', + content: 'Post about technology', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 10, + num_reposts: 5, + num_views: 100, + num_replies: 3, + num_quotes: 2, + author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/avatar.jpg', + followers: 100, + following: 50, + hashtags: [], + images: [], + videos: [], + }; + + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [ + 2.5, + '2024-01-15T10:30:00Z', + 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', ], - should: expect.any(Array), - minimum_should_match: 1, }, - }, + ], + }, + }; + + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce(new Map()); + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + + jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + { + ...mock_tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + }, + ]); + + const result = await service.searchPosts(current_user_id, query_dto); + + expect(elasticsearch_service.search).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + body: expect.objectContaining({ + query: expect.objectContaining({ + function_score: expect.objectContaining({ + functions: expect.arrayContaining([ + expect.objectContaining({ + field_value_factor: expect.objectContaining({ + field: 'num_likes', + }), + }), + ]), + }), + }), }), }); + expect(result.data).toHaveLength(1); + }); + + it('should search with multiple hashtags', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: '#technology #ai #innovation', + limit: 20, + }; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: 'tweet', + content: 'Post with multiple hashtags #technology #ai #innovation', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 20, + num_reposts: 10, + num_views: 300, + num_replies: 8, + num_quotes: 5, + author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/avatar.jpg', + followers: 100, + following: 50, + hashtags: ['#technology', '#ai', '#innovation'], + images: [], + videos: [], + }; + + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [ + 5.0, + '2024-01-15T10:30:00Z', + 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + ], + }, + ], + }, + }; + + const trending_hashtags = new Map([ + ['#technology', 200], + ['#ai', 250], + ['#innovation', 180], + ]); + + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce( + trending_hashtags + ); + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + + jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + { + ...mock_tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + }, + ]); + + const result = await service.searchPosts(current_user_id, query_dto); + expect(result.data).toHaveLength(1); expect(result.data[0].content).toContain('#technology'); + expect(result.data[0].content).toContain('#ai'); + expect(result.data[0].content).toContain('#innovation'); + }); + + it('should search posts with multiple filters (media + username)', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: 'technology', + limit: 20, + has_media: true, + username: 'alyaa242', + }; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: 'tweet', + content: 'Tech post with media', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 10, + num_reposts: 5, + num_views: 100, + num_replies: 3, + num_quotes: 2, + author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/avatar.jpg', + followers: 100, + following: 50, + images: ['https://example.com/image1.jpg'], + videos: [], + }; + + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [ + 2.5, + '2024-01-15T10:30:00Z', + 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + ], + }, + ], + }, + }; + + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + + jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + { + ...mock_tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + }, + ]); + + const result = await service.searchPosts(current_user_id, query_dto); + + expect(elasticsearch_service.search).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + body: expect.objectContaining({ + query: expect.objectContaining({ + function_score: expect.objectContaining({ + query: expect.objectContaining({ + bool: expect.objectContaining({ + filter: expect.arrayContaining([ + { + script: { + script: { + source: "(doc['images'].size() > 0 || doc['videos'].size() > 0)", + }, + }, + }, + { + term: { + username: 'alyaa242', + }, + }, + ]), + }), + }), + }), + }), + }), + }); + + expect(result.data).toHaveLength(1); + expect(result.data[0].images).toBeDefined(); + expect(result.data[0].images.length).toBeGreaterThan(0); }); it('should search posts with pagination and return next_cursor', async () => { @@ -755,7 +1754,7 @@ describe('SearchService', () => { const mock_tweets = [ { tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - type: 'post', + type: 'tweet', content: 'This is a post with images', created_at: '2024-01-15T10:30:00Z', updated_at: '2024-01-15T10:30:00Z', @@ -776,7 +1775,7 @@ describe('SearchService', () => { }, { tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', - type: 'post', + type: 'tweet', content: 'Second post about technology', created_at: '2024-01-15T09:30:00Z', updated_at: '2024-01-15T09:30:00Z', @@ -827,9 +1826,9 @@ describe('SearchService', () => { jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ { ...mock_tweets[0], - has_liked: false, - has_reposted: false, - has_bookmarked: false, + is_liked: false, + is_reposted: false, + is_bookmarked: false, }, ]); @@ -867,7 +1866,7 @@ describe('SearchService', () => { const mock_tweet = { tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - type: 'post', + type: 'tweet', content: 'This is a post with images', created_at: '2024-01-15T10:30:00Z', updated_at: '2024-01-15T10:30:00Z', @@ -901,7 +1900,6 @@ describe('SearchService', () => { ], }, }; - elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); @@ -910,9 +1908,9 @@ describe('SearchService', () => { jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ { ...mock_tweet, - has_liked: false, - has_reposted: false, - has_bookmarked: false, + is_liked: false, + is_reposted: false, + is_bookmarked: false, }, ]); @@ -934,10 +1932,48 @@ describe('SearchService', () => { expect(result.pagination.has_more).toBe(false); }); - it('should search posts and attach parent tweet for reply', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const query_dto: PostsSearchDto = { - query: 'reply', + it('should return null when encoding undefined cursor', () => { + const encoded = (service as any).encodeTweetsCursor(undefined); + + expect(encoded).toBeNull(); + }); + + it('should return null when encoding null cursor', () => { + const encoded = (service as any).encodeTweetsCursor(null); + + expect(encoded).toBeNull(); + }); + + it('should decode cursor successfully', () => { + const sort = [2.5, '2024-01-15T10:30:00Z', 'a1b2c3d4-e5f6-7890-abcd-ef1234567890']; + const encoded = Buffer.from(JSON.stringify(sort)).toString('base64'); + const decoded = (service as any).decodeTweetsCursor(encoded); + + expect(decoded).toEqual(sort); + }); + + it('should return null when decoding null cursor', () => { + const decoded = (service as any).decodeTweetsCursor(null); + + expect(decoded).toBeNull(); + }); + + it('should return null when decoding invalid cursor', () => { + const decoded = (service as any).decodeTweetsCursor('invalid-base64-string!!!'); + + expect(decoded).toBeNull(); + }); + + it('should return null when decoding malformed base64 cursor', () => { + const decoded = (service as any).decodeTweetsCursor('YWJjZGVmZ2g='); + + expect(decoded).toBeNull(); + }); + + it('should search posts and attach parent tweet for reply', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: 'reply', limit: 20, }; @@ -998,7 +2034,7 @@ describe('SearchService', () => { num_quotes: 0, author_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', username: 'replyuser', - name: 'Alyaa Ali', + name: 'Reply User', avatar_url: 'https://example.com/reply-avatar.jpg', followers: 50, following: 25, @@ -1036,6 +2072,7 @@ describe('SearchService', () => { ], }; + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce(new Map()); elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); elasticsearch_service.mget.mockResolvedValueOnce(mock_mget_response as any); @@ -1043,8 +2080,9 @@ describe('SearchService', () => { Promise.resolve( tweets.map((tweet) => ({ ...tweet, - has_liked: false, - has_reposted: false, + is_liked: false, + is_reposted: false, + is_bookmarked: false, })) ) ); @@ -1073,71 +2111,52 @@ describe('SearchService', () => { ); }); - it('should return empty result on elasticsearch error', async () => { + it('should search posts and attach parent tweet for quote', async () => { const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; const query_dto: PostsSearchDto = { - query: 'technology', - limit: 20, - }; - - elasticsearch_service.search.mockRejectedValueOnce(new Error('Elasticsearch error')); - - const result = await service.searchPosts(current_user_id, query_dto); - - expect(result).toEqual({ - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }); - }); - }); - - describe('searchLatestPosts', () => { - it('should return empty result when query is empty', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const query_dto: SearchQueryDto = { - query: '', + query: 'quote', limit: 20, }; - const result = await service.searchLatestPosts(current_user_id, query_dto); - - expect(result).toEqual({ - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }); - expect(elasticsearch_service.search).not.toHaveBeenCalled(); - }); - - it('should search latest posts sorted by created_at', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const query_dto: SearchQueryDto = { - query: 'latest', - limit: 20, + const mock_parent_tweet = { + tweet_id: 'parent-quote-id', + type: 'post', + content: 'Original quoted post', + created_at: '2024-01-15T09:00:00Z', + updated_at: '2024-01-15T09:00:00Z', + num_likes: 25, + num_reposts: 12, + num_views: 250, + num_replies: 6, + num_quotes: 4, + author_id: 'parent-author-id', + username: 'originaluser', + name: 'Original User', + avatar_url: 'https://example.com/original-avatar.jpg', + followers: 120, + following: 60, + images: [], + videos: [], }; const mock_tweet = { - tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - type: 'post', - content: 'Latest post', - created_at: '2024-01-16T10:30:00Z', - updated_at: '2024-01-16T10:30:00Z', - num_likes: 5, - num_reposts: 2, - num_views: 50, - num_replies: 1, - num_quotes: 0, - author_id: 'author-id', - username: 'alyaali', - name: 'Alyaa Ali', - avatar_url: 'https://example.com/latest-avatar.jpg', + tweet_id: 'quote-tweet-id', + type: 'quote', + content: 'Quoting this great post', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + parent_id: 'parent-quote-id', + num_likes: 8, + num_reposts: 4, + num_views: 80, + num_replies: 2, + num_quotes: 1, + author_id: 'quote-author-id', + username: 'quoteuser', + name: 'Quote User', + avatar_url: 'https://example.com/quote-avatar.jpg', followers: 75, - following: 40, + following: 35, images: [], videos: [], }; @@ -1147,79 +2166,76 @@ describe('SearchService', () => { hits: [ { _source: mock_tweet, - sort: [ - '2024-01-16T10:30:00Z', - 2.5, - '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - ], + sort: [2.8, '2024-01-15T10:30:00Z', 'quote-tweet-id'], }, ], }, }; - elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); - elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + const mock_mget_response = { + docs: [ + { + _id: 'parent-quote-id', + found: true, + _source: mock_parent_tweet, + }, + ], + }; - jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce(new Map()); + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce(mock_mget_response as any); - jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ - { - ...mock_tweet, - has_liked: false, - has_reposted: false, - has_bookmarked: false, - }, - ]); + jest.spyOn(service as any, 'attachUserInteractions').mockImplementation((tweets: any) => + Promise.resolve( + tweets.map((tweet) => ({ + ...tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + })) + ) + ); - const result = await service.searchLatestPosts(current_user_id, query_dto); + const result = await service.searchPosts(current_user_id, query_dto); - expect(elasticsearch_service.search).toHaveBeenCalledWith({ + expect(elasticsearch_service.mget).toHaveBeenCalledWith({ index: ELASTICSEARCH_INDICES.TWEETS, - body: expect.objectContaining({ - sort: [ - { created_at: { order: 'desc' } }, - { _score: { order: 'desc' } }, - { tweet_id: { order: 'desc' } }, - ], - }), + body: { + ids: ['parent-quote-id'], + }, }); expect(result.data).toHaveLength(1); - expect(result.data[0].tweet_id).toBe('0c059899-f706-4c8f-97d7-ba2e9fc22d6d'); + expect(result.data[0].type).toBe('quote'); + expect(result.data[0].parent_tweet).toBeDefined(); + expect(result.data[0].parent_tweet?.tweet_id).toBe('parent-quote-id'); }); - it('should search latest posts with cursor', async () => { + it('should handle posts without related tweets (regular posts)', async () => { const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const cursor = Buffer.from( - JSON.stringify([ - '2024-01-16T10:30:00Z', - 2.5, - '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - ]) - ).toString('base64'); - const query_dto: SearchQueryDto = { - query: 'latest', + const query_dto: PostsSearchDto = { + query: 'regular post', limit: 20, - cursor, }; const mock_tweet = { - tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + tweet_id: 'regular-post-id', type: 'post', - content: 'Next post', - created_at: '2024-01-16T09:30:00Z', - updated_at: '2024-01-16T09:30:00Z', - num_likes: 3, - num_reposts: 1, - num_views: 30, - num_replies: 0, - num_quotes: 0, - author_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - username: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - name: 'Next Alyaa Ali', - avatar_url: 'https://example.com/next-avatar.jpg', - followers: 60, - following: 30, + content: 'Just a regular post', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 15, + num_reposts: 7, + num_views: 150, + num_replies: 4, + num_quotes: 2, + author_id: 'regular-author-id', + username: 'regularuser', + name: 'Regular User', + avatar_url: 'https://example.com/regular-avatar.jpg', + followers: 90, + following: 45, images: [], videos: [], }; @@ -1229,103 +2245,795 @@ describe('SearchService', () => { hits: [ { _source: mock_tweet, - sort: [ - '2024-01-16T09:30:00Z', - 2.0, - '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - ], + sort: [2.6, '2024-01-15T10:30:00Z', 'regular-post-id'], }, ], }, }; + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce(new Map()); elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); - jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + jest.spyOn(service as any, 'attachUserInteractions').mockImplementation((tweets: any) => + Promise.resolve( + tweets.map((tweet) => ({ + ...tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + })) + ) + ); - jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + const result = await service.searchPosts(current_user_id, query_dto); + + expect(result.data).toHaveLength(1); + expect(result.data[0].type).toBe('post'); + expect(result.data[0].parent_tweet).toBeUndefined(); + expect(result.data[0].conversation_tweet).toBeUndefined(); + }); + + it('should return empty array when no tweets provided', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; + + const result = await (service as any).attachUserInteractions([], current_user_id); + + expect(result).toEqual([]); + expect(data_source.query).not.toHaveBeenCalled(); + }); + + it('should attach interactions to main tweet', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: TweetType.TWEET, + content: 'Test tweet', + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'testuser', + }, + }; + + const mock_interactions = [ { - ...mock_tweet, - has_liked: false, - has_reposted: false, - has_bookmarked: false, + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + is_liked: 1, + is_reposted: 0, + is_bookmarked: 1, + is_following: 1, + is_follower: 0, }, - ]); + ]; - const result = await service.searchLatestPosts(current_user_id, query_dto); + data_source.query.mockResolvedValueOnce(mock_interactions); - expect(elasticsearch_service.search).toHaveBeenCalledWith({ - index: ELASTICSEARCH_INDICES.TWEETS, - body: expect.objectContaining({ - search_after: [ - '2024-01-16T10:30:00Z', - 2.5, - '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - ], - }), - }); + const result = await (service as any).attachUserInteractions( + [mock_tweet], + current_user_id + ); - expect(result.data).toHaveLength(1); + expect(result).toHaveLength(1); + expect(result[0].is_liked).toBe(true); + expect(result[0].is_reposted).toBe(false); + expect(result[0].is_bookmarked).toBe(true); + expect(result[0].user.is_following).toBe(true); + expect(result[0].user.is_follower).toBe(false); }); - it('should return empty result on elasticsearch error', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const query_dto: SearchQueryDto = { - query: 'latest', - limit: 20, + it('should filter out tweet when main interaction is blocked', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: TweetType.TWEET, + content: 'Test tweet', + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'blockeduser', + }, }; - elasticsearch_service.search.mockRejectedValueOnce(new Error('Elasticsearch error')); + data_source.query.mockResolvedValueOnce([]); - const result = await service.searchLatestPosts(current_user_id, query_dto); + const result = await (service as any).attachUserInteractions( + [mock_tweet], + current_user_id + ); - expect(result).toEqual({ - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }); + expect(result).toHaveLength(0); }); - it('should search latest posts with hashtag query', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const query_dto: SearchQueryDto = { - query: '#javascript', - limit: 20, - }; + it('should attach interactions to tweet with parent_tweet', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; const mock_tweet = { tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - type: 'post', - content: 'Learning #javascript today', - created_at: '2024-01-16T10:30:00Z', - updated_at: '2024-01-16T10:30:00Z', - num_likes: 10, - num_reposts: 5, - num_views: 100, - num_replies: 3, - num_quotes: 2, - author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', - username: 'alyaa242', - name: 'Alyaa Ali', - avatar_url: 'https://example.com/avatar.jpg', - followers: 100, - following: 50, - hashtags: ['#javascript'], - images: [], - videos: [], + type: TweetType.QUOTE, + content: 'Quote tweet', + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'testuser', + }, + parent_tweet: { + tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', + type: TweetType.TWEET, + content: 'Original tweet', + user: { + id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + username: 'originaluser', + }, + }, }; - const mock_elasticsearch_response = { + const mock_interactions = [ + { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + is_liked: 1, + is_reposted: 0, + is_bookmarked: 0, + is_following: 0, + is_follower: 0, + }, + { + tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', + user_id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + is_liked: 0, + is_reposted: 1, + is_bookmarked: 1, + is_following: 1, + is_follower: 1, + }, + ]; + + data_source.query.mockResolvedValueOnce(mock_interactions); + + const result = await (service as any).attachUserInteractions( + [mock_tweet], + current_user_id + ); + + expect(result).toHaveLength(1); + expect(result[0].is_liked).toBe(true); + expect(result[0].parent_tweet.is_liked).toBe(false); + expect(result[0].parent_tweet.is_reposted).toBe(true); + expect(result[0].parent_tweet.is_bookmarked).toBe(true); + expect(result[0].parent_tweet.user.is_following).toBe(true); + expect(result[0].parent_tweet.user.is_follower).toBe(true); + }); + + it('should filter out quote tweet when parent interaction is missing', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: TweetType.QUOTE, + content: 'Quote tweet', + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'testuser', + }, + parent_tweet: { + tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', + type: TweetType.TWEET, + content: 'Blocked original tweet', + user: { + id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + username: 'blockeduser', + }, + }, + }; + + const mock_interactions = [ + { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + is_liked: 1, + is_reposted: 0, + is_bookmarked: 0, + is_following: 0, + is_follower: 0, + }, + ]; + + data_source.query.mockResolvedValueOnce(mock_interactions); + + const result = await (service as any).attachUserInteractions( + [mock_tweet], + current_user_id + ); + + expect(result).toHaveLength(0); + }); + + it('should attach interactions to tweet with conversation_tweet', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: TweetType.REPLY, + content: 'Reply tweet', + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'testuser', + }, + parent_tweet: { + tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', + type: TweetType.TWEET, + content: 'Parent tweet', + user: { + id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + username: 'parentuser', + }, + }, + conversation_tweet: { + tweet_id: 'c3d4e5f6-g7h8-9012-cdef-gh3456789012', + type: TweetType.TWEET, + content: 'Conversation root', + user: { + id: '3c0g1128-87dd-6gc6-c836-gee8c656gg16', + username: 'rootuser', + }, + }, + }; + + const mock_interactions = [ + { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + is_liked: 1, + is_reposted: 0, + is_bookmarked: 0, + is_following: 0, + is_follower: 0, + }, + { + tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', + user_id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + is_liked: 0, + is_reposted: 1, + is_bookmarked: 0, + is_following: 1, + is_follower: 0, + }, + { + tweet_id: 'c3d4e5f6-g7h8-9012-cdef-gh3456789012', + user_id: '3c0g1128-87dd-6gc6-c836-gee8c656gg16', + is_liked: 1, + is_reposted: 1, + is_bookmarked: 1, + is_following: 0, + is_follower: 1, + }, + ]; + + data_source.query.mockResolvedValueOnce(mock_interactions); + + const result = await (service as any).attachUserInteractions( + [mock_tweet], + current_user_id + ); + + expect(result).toHaveLength(1); + expect(result[0].is_liked).toBe(true); + expect(result[0].parent_tweet.is_reposted).toBe(true); + expect(result[0].parent_tweet.user.is_following).toBe(true); + expect(result[0].conversation_tweet.is_liked).toBe(true); + expect(result[0].conversation_tweet.is_bookmarked).toBe(true); + expect(result[0].conversation_tweet.user.is_follower).toBe(true); + }); + + it('should filter out reply when parent interaction is missing', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: TweetType.REPLY, + content: 'Reply tweet', + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'testuser', + }, + parent_tweet: { + tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', + type: TweetType.TWEET, + content: 'Blocked parent', + user: { + id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + username: 'blockeduser', + }, + }, + conversation_tweet: { + tweet_id: 'c3d4e5f6-g7h8-9012-cdef-gh3456789012', + type: TweetType.TWEET, + content: 'Conversation root', + user: { + id: '3c0g1128-87dd-6gc6-c836-gee8c656gg16', + username: 'rootuser', + }, + }, + }; + + const mock_interactions = [ + { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + is_liked: 1, + is_reposted: 0, + is_bookmarked: 0, + is_following: 0, + is_follower: 0, + }, + { + tweet_id: 'c3d4e5f6-g7h8-9012-cdef-gh3456789012', + user_id: '3c0g1128-87dd-6gc6-c836-gee8c656gg16', + is_liked: 1, + is_reposted: 1, + is_bookmarked: 1, + is_following: 0, + is_follower: 1, + }, + ]; + + data_source.query.mockResolvedValueOnce(mock_interactions); + + const result = await (service as any).attachUserInteractions( + [mock_tweet], + current_user_id + ); + + expect(result).toHaveLength(0); + }); + + it('should filter out reply when conversation interaction is missing', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: TweetType.REPLY, + content: 'Reply tweet', + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'testuser', + }, + parent_tweet: { + tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', + type: TweetType.TWEET, + content: 'Parent tweet', + user: { + id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + username: 'parentuser', + }, + }, + conversation_tweet: { + tweet_id: 'c3d4e5f6-g7h8-9012-cdef-gh3456789012', + type: TweetType.TWEET, + content: 'Blocked conversation root', + user: { + id: '3c0g1128-87dd-6gc6-c836-gee8c656gg16', + username: 'blockeduser', + }, + }, + }; + + const mock_interactions = [ + { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + is_liked: 1, + is_reposted: 0, + is_bookmarked: 0, + is_following: 0, + is_follower: 0, + }, + { + tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', + user_id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + is_liked: 0, + is_reposted: 1, + is_bookmarked: 0, + is_following: 1, + is_follower: 0, + }, + ]; + + data_source.query.mockResolvedValueOnce(mock_interactions); + + const result = await (service as any).attachUserInteractions( + [mock_tweet], + current_user_id + ); + + expect(result).toHaveLength(0); + }); + + it('should handle multiple tweets with mixed interactions', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; + + const mock_tweets = [ + { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: TweetType.TWEET, + content: 'First tweet', + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'user1', + }, + }, + { + tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', + type: TweetType.TWEET, + content: 'Second tweet', + user: { + id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + username: 'user2', + }, + }, + ]; + + const mock_interactions = [ + { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + is_liked: 1, + is_reposted: 1, + is_bookmarked: 1, + is_following: 1, + is_follower: 1, + }, + { + tweet_id: 'b2c3d4e5-f6g7-8901-bcde-fg2345678901', + user_id: '2b9f0017-76cc-5fb5-b725-fdd7b545ff05', + is_liked: 0, + is_reposted: 0, + is_bookmarked: 0, + is_following: 0, + is_follower: 0, + }, + ]; + + data_source.query.mockResolvedValueOnce(mock_interactions); + + const result = await (service as any).attachUserInteractions( + mock_tweets, + current_user_id + ); + + expect(result).toHaveLength(2); + expect(result[0].is_liked).toBe(true); + expect(result[0].is_reposted).toBe(true); + expect(result[1].is_liked).toBe(false); + expect(result[1].is_reposted).toBe(false); + }); + + it('should handle tweet without parent_tweet when parent_interaction is undefined', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: TweetType.TWEET, + content: 'Tweet without parent', + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'testuser', + }, + }; + + const mock_interactions = [ + { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + is_liked: 1, + is_reposted: 0, + is_bookmarked: 0, + is_following: 0, + is_follower: 0, + }, + ]; + + data_source.query.mockResolvedValueOnce(mock_interactions); + + const result = await (service as any).attachUserInteractions( + [mock_tweet], + current_user_id + ); + + expect(result).toHaveLength(1); + expect(result[0].parent_tweet).toBeUndefined(); + }); + + it('should handle tweet without conversation_tweet when conversation_interaction is undefined', async () => { + const current_user_id = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: TweetType.TWEET, + content: 'Tweet without conversation', + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'testuser', + }, + }; + + const mock_interactions = [ + { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + user_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + is_liked: 0, + is_reposted: 0, + is_bookmarked: 0, + is_following: 0, + is_follower: 0, + }, + ]; + + data_source.query.mockResolvedValueOnce(mock_interactions); + + const result = await (service as any).attachUserInteractions( + [mock_tweet], + current_user_id + ); + + expect(result).toHaveLength(1); + expect(result[0].conversation_tweet).toBeUndefined(); + }); + + it('should return empty result on elasticsearch error', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: 'technology', + limit: 20, + }; + + elasticsearch_service.search.mockRejectedValueOnce(new Error('Elasticsearch error')); + + const result = await service.searchPosts(current_user_id, query_dto); + + expect(result).toEqual({ + data: [], + pagination: { + next_cursor: null, + has_more: false, + }, + }); + }); + }); + + describe('searchLatestPosts', () => { + it('should return empty result when query is empty', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { + query: '', + limit: 20, + }; + + const result = await service.searchLatestPosts(current_user_id, query_dto); + + expect(result).toEqual({ + data: [], + pagination: { + next_cursor: null, + has_more: false, + }, + }); + expect(elasticsearch_service.search).not.toHaveBeenCalled(); + }); + + it('should search latest posts sorted by created_at', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { + query: 'latest', + limit: 20, + }; + + const mock_tweet = { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + type: 'tweet', + content: 'Latest post', + created_at: '2024-01-16T10:30:00Z', + updated_at: '2024-01-16T10:30:00Z', + num_likes: 5, + num_reposts: 2, + num_views: 50, + num_replies: 1, + num_quotes: 0, + author_id: 'author-id', + username: 'alyaali', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/latest-avatar.jpg', + followers: 75, + following: 40, + images: [], + videos: [], + }; + + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [ + '2024-01-16T10:30:00Z', + 2.5, + '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + ], + }, + ], + }, + }; + + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + + jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + + jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + { + ...mock_tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + }, + ]); + + const result = await service.searchLatestPosts(current_user_id, query_dto); + + expect(elasticsearch_service.search).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + body: expect.objectContaining({ + sort: [ + { created_at: { order: 'desc' } }, + { _score: { order: 'desc' } }, + { tweet_id: { order: 'desc' } }, + ], + }), + }); + + expect(result.data).toHaveLength(1); + expect(result.data[0].tweet_id).toBe('0c059899-f706-4c8f-97d7-ba2e9fc22d6d'); + }); + + it('should search latest posts with cursor', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const cursor = Buffer.from( + JSON.stringify([ + '2024-01-16T10:30:00Z', + 2.5, + '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + ]) + ).toString('base64'); + const query_dto: SearchQueryDto = { + query: 'latest', + limit: 20, + cursor, + }; + + const mock_tweet = { + tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + type: 'tweet', + content: 'Next post', + created_at: '2024-01-16T09:30:00Z', + updated_at: '2024-01-16T09:30:00Z', + num_likes: 3, + num_reposts: 1, + num_views: 30, + num_replies: 0, + num_quotes: 0, + author_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + username: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + name: 'Next Alyaa Ali', + avatar_url: 'https://example.com/next-avatar.jpg', + followers: 60, + following: 30, + images: [], + videos: [], + }; + + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [ + '2024-01-16T09:30:00Z', + 2.0, + '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + ], + }, + ], + }, + }; + + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + + jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + + jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + { + ...mock_tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + }, + ]); + + const result = await service.searchLatestPosts(current_user_id, query_dto); + + expect(elasticsearch_service.search).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + body: expect.objectContaining({ + search_after: [ + '2024-01-16T10:30:00Z', + 2.5, + '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + ], + }), + }); + + expect(result.data).toHaveLength(1); + }); + + it('should return empty result on elasticsearch error', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: SearchQueryDto = { + query: 'latest', + limit: 20, + }; + + elasticsearch_service.search.mockRejectedValueOnce(new Error('Elasticsearch error')); + + const result = await service.searchLatestPosts(current_user_id, query_dto); + + expect(result).toEqual({ + data: [], + pagination: { + next_cursor: null, + has_more: false, + }, + }); + }); + + it('should search posts with username filter', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: 'technology', + limit: 20, + username: 'alyaa242', + }; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: 'tweet', + content: 'This is a post with images', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 10, + num_reposts: 5, + num_views: 100, + num_replies: 3, + num_quotes: 2, + author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/avatar.jpg', + followers: 100, + following: 50, + images: ['https://example.com/image1.jpg'], + videos: [], + }; + + const mock_elasticsearch_response = { hits: { hits: [ { _source: mock_tweet, sort: [ - '2024-01-16T10:30:00Z', 2.5, + '2024-01-15T10:30:00Z', 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', ], }, @@ -1337,13 +3045,139 @@ describe('SearchService', () => { elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: 'tweet', + content: 'This is a post with images', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 10, + num_reposts: 5, + num_views: 100, + num_replies: 3, + num_quotes: 2, + images: ['https://example.com/image1.jpg'], + videos: [], + user: { + id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/avatar.jpg', + followers: 100, + following: 50, + is_follower: false, + is_following: false, + }, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + }, + ]); + + const result = await service.searchLatestPosts(current_user_id, query_dto); + + expect(elasticsearch_service.search).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + body: expect.objectContaining({ + query: expect.objectContaining({ + function_score: expect.objectContaining({ + query: expect.objectContaining({ + bool: expect.objectContaining({ + should: expect.arrayContaining([ + expect.objectContaining({ + multi_match: expect.objectContaining({ + query: 'technology', + }), + }), + ]), + filter: expect.arrayContaining([ + { + term: { + username: 'alyaa242', + }, + }, + ]), + }), + }), + functions: expect.arrayContaining([ + expect.objectContaining({ + field_value_factor: expect.objectContaining({ + field: 'num_likes', + }), + }), + ]), + }), + }), + }), + }); + + expect(result.data).toHaveLength(1); + expect(result.data[0].user.username).toBe('alyaa242'); + }); + + it('should search latest posts with hashtag query and apply trending hashtag boost', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: '#technology', + limit: 20, + }; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: 'tweet', + content: 'Post with #technology', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 10, + num_reposts: 5, + num_views: 100, + num_replies: 3, + num_quotes: 2, + author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/avatar.jpg', + followers: 100, + following: 50, + hashtags: ['#technology'], + images: ['https://example.com/image1.jpg'], + videos: [], + }; + + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [ + 2.5, + '2024-01-15T10:30:00Z', + 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + ], + }, + ], + }, + }; + + const trending_hashtags = new Map([ + ['#technology', 150], + ['#ai', 100], + ]); + + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce( + trending_hashtags + ); + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ { ...mock_tweet, - has_liked: false, - has_reposted: false, - has_bookmarked: false, + is_liked: false, + is_reposted: false, + is_bookmarked: false, }, ]); @@ -1352,42 +3186,174 @@ describe('SearchService', () => { expect(elasticsearch_service.search).toHaveBeenCalledWith({ index: ELASTICSEARCH_INDICES.TWEETS, body: expect.objectContaining({ - query: { - bool: { - must: [ - { - term: { - hashtags: { - value: '#javascript', - boost: 10, + query: expect.objectContaining({ + function_score: expect.objectContaining({ + query: expect.objectContaining({ + bool: expect.objectContaining({ + must: expect.arrayContaining([ + { + term: { + hashtags: { + value: '#technology', + boost: 10, + }, + }, }, - }, - }, + ]), + }), + }), + functions: expect.arrayContaining([ + expect.objectContaining({ + field_value_factor: expect.objectContaining({ + field: 'num_likes', + }), + }), + expect.objectContaining({ + field_value_factor: expect.objectContaining({ + field: 'num_reposts', + }), + }), + expect.objectContaining({ + filter: expect.objectContaining({ + term: { + hashtags: { value: '#technology' }, + }, + }), + weight: expect.any(Number), + }), + ]), + boost_mode: 'sum', + score_mode: 'sum', + }), + }), + }), + }); + + expect(result.data).toHaveLength(1); + expect(result.data[0].content).toContain('#technology'); + }); + + it('should search latest posts with both hashtag and text query with trending boost', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: '#technology AI innovation', + limit: 20, + }; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: 'tweet', + content: 'Post about AI innovation with #technology', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 15, + num_reposts: 8, + num_views: 200, + num_replies: 5, + num_quotes: 3, + author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/avatar.jpg', + followers: 100, + following: 50, + hashtags: ['#technology'], + images: [], + videos: [], + }; + + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [ + 3.5, + '2024-01-15T10:30:00Z', + 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', ], - should: expect.any(Array), }, - }, + ], + }, + }; + + const trending_hashtags = new Map([ + ['#technology', 200], + ['#ai', 150], + ]); + + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce( + trending_hashtags + ); + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + + jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + { + ...mock_tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + }, + ]); + + const result = await service.searchLatestPosts(current_user_id, query_dto); + + expect(elasticsearch_service.search).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + body: expect.objectContaining({ + query: expect.objectContaining({ + function_score: expect.objectContaining({ + query: expect.objectContaining({ + bool: expect.objectContaining({ + must: expect.arrayContaining([ + { + term: { + hashtags: { + value: '#technology', + boost: 10, + }, + }, + }, + ]), + should: expect.arrayContaining([ + expect.objectContaining({ + multi_match: expect.objectContaining({ + query: expect.stringContaining('AI'), + fields: expect.arrayContaining([ + 'content^3', + 'username^2', + 'name', + ]), + }), + }), + ]), + minimum_should_match: 1, + }), + }), + functions: expect.any(Array), + }), + }), }), }); expect(result.data).toHaveLength(1); - expect(result.data[0].content).toContain('#javascript'); }); - it('should search latest posts with username filter', async () => { + it('should apply boosting in latest posts with empty trending hashtags map', async () => { const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const query_dto: SearchQueryDto = { - query: 'coding', + const query_dto: PostsSearchDto = { + query: 'technology', limit: 20, - username: 'alyaa242', }; const mock_tweet = { tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - type: 'post', - content: 'Coding all day', - created_at: '2024-01-16T10:30:00Z', - updated_at: '2024-01-16T10:30:00Z', + type: 'tweet', + content: 'Post about technology', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', num_likes: 10, num_reposts: 5, num_views: 100, @@ -1399,337 +3365,637 @@ describe('SearchService', () => { avatar_url: 'https://example.com/avatar.jpg', followers: 100, following: 50, + hashtags: [], images: [], videos: [], }; - const mock_elasticsearch_response = { - hits: { - hits: [ - { - _source: mock_tweet, - sort: [ - '2024-01-16T10:30:00Z', - 2.5, - 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - ], - }, - ], + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [ + 2.5, + '2024-01-15T10:30:00Z', + 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + ], + }, + ], + }, + }; + + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce(new Map()); + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + + jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + { + ...mock_tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + }, + ]); + + const result = await service.searchLatestPosts(current_user_id, query_dto); + + expect(elasticsearch_service.search).toHaveBeenCalledWith({ + index: ELASTICSEARCH_INDICES.TWEETS, + body: expect.objectContaining({ + query: expect.objectContaining({ + function_score: expect.objectContaining({ + functions: expect.arrayContaining([ + expect.objectContaining({ + field_value_factor: expect.objectContaining({ + field: 'num_likes', + }), + }), + ]), + }), + }), + }), + }); + + expect(result.data).toHaveLength(1); + }); + }); + + it('should search latest posts with multiple hashtags', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto: PostsSearchDto = { + query: '#technology #ai #innovation', + limit: 20, + }; + + const mock_tweet = { + tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', + type: 'tweet', + content: 'Post with multiple hashtags #technology #ai #innovation', + created_at: '2024-01-15T10:30:00Z', + updated_at: '2024-01-15T10:30:00Z', + num_likes: 20, + num_reposts: 10, + num_views: 300, + num_replies: 8, + num_quotes: 5, + author_id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', + username: 'alyaa242', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/avatar.jpg', + followers: 100, + following: 50, + hashtags: ['#technology', '#ai', '#innovation'], + images: [], + videos: [], + }; + + const mock_elasticsearch_response = { + hits: { + hits: [ + { + _source: mock_tweet, + sort: [5.0, '2024-01-15T10:30:00Z', 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'], + }, + ], + }, + }; + + const trending_hashtags = new Map([ + ['#technology', 200], + ['#ai', 250], + ['#innovation', 180], + ]); + + jest.spyOn(service as any, 'getTrendingHashtags').mockResolvedValueOnce(trending_hashtags); + elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); + elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + + jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + { + ...mock_tweet, + is_liked: false, + is_reposted: false, + is_bookmarked: false, + }, + ]); + + const result = await service.searchLatestPosts(current_user_id, query_dto); + + expect(result.data).toHaveLength(1); + expect(result.data[0].content).toContain('#technology'); + expect(result.data[0].content).toContain('#ai'); + expect(result.data[0].content).toContain('#innovation'); + }); + + describe('getMentionSuggestions', () => { + it('should return empty array when query is empty', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto = { query: '' }; + + const result = await service.getMentionSuggestions(current_user_id, query_dto); + + expect(result).toEqual([]); + }); + + it('should return user suggestions for mentions', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto = { query: 'alya' }; + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce([ + { + user_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + username: 'alyaali', + name: 'Alyaa Ali', + avatar_url: 'https://example.com/blah.jpg', + is_following: true, + is_follower: false, + }, + { + user_id: '0c059299-f706-4c8f-97d7-ba2e9fc22d6d', + username: 'alyaa242', + name: 'Alyaaa Eissa', + avatar_url: 'https://example.com/johnny.jpg', + is_following: false, + is_follower: true, + }, + ]); + + const result = await service.getMentionSuggestions(current_user_id, query_dto); + + expect(result).toHaveLength(2); + expect(result[0].username).toBe('alyaali'); + expect(result[1].username).toBe('alyaa242'); + }); + + it('should limit mention suggestions to 10 users', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const query_dto = { query: 'user' }; + + const mock_users = Array.from({ length: 15 }, (_, i) => ({ + user_id: `user-${i}`, + username: `user${i}`, + name: `User ${i}`, + avatar_url: `https://example.com/user${i}.jpg`, + is_following: false, + is_follower: false, + })); + + const mock_query_builder = user_repository.createQueryBuilder() as any; + mock_query_builder.getRawMany.mockResolvedValueOnce(mock_users.slice(0, 10)); + + const result = await service.getMentionSuggestions(current_user_id, query_dto); + + expect(result.length).toBeLessThanOrEqual(10); + }); + }); + + describe('extractSuggestionsFromHits', () => { + const trending_hashtags = new Map([ + ['#javascript', 150], + ['#ai', 100], + ]); + + it('should extract hashtag suggestions from hits', () => { + const hits = [ + { + _source: { + hashtags: ['#javascript', '#nodejs', '#typescript'], + content: 'Learning javascript today', + }, }, - }; + ]; - elasticsearch_service.search.mockResolvedValueOnce(mock_elasticsearch_response as any); - elasticsearch_service.mget.mockResolvedValueOnce({ docs: [] } as any); + const result = (service as any).extractSuggestionsFromHits( + hits, + '#java', + trending_hashtags, + 3 + ); - jest.spyOn(service as any, 'attachRelatedTweets').mockResolvedValueOnce([mock_tweet]); + expect(result).toHaveLength(1); + expect(result[0].query).toBe('#javascript'); + expect(result[0].is_trending).toBe(true); + }); - jest.spyOn(service as any, 'attachUserInteractions').mockResolvedValueOnce([ + it('should return empty array when text is null or undefined', () => { + const hits = [ { - tweet_id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890', - type: 'post', - content: 'Coding all day', - created_at: '2024-01-16T10:30:00Z', - updated_at: '2024-01-16T10:30:00Z', - num_likes: 10, - num_reposts: 5, - num_views: 100, - num_replies: 3, - num_quotes: 2, - images: [], - videos: [], - user: { - id: '1a8e9906-65bb-4fa4-a614-ecc6a434ee94', - username: 'alyaa242', - name: 'Alyaa Ali', - avatar_url: 'https://example.com/avatar.jpg', - followers: 100, - following: 50, - is_follower: false, - is_following: false, + _source: { + content: null, }, - has_liked: false, - has_reposted: false, - has_bookmarked: false, }, - ]); + { + _source: {}, + }, + ]; - const result = await service.searchLatestPosts(current_user_id, query_dto); + const result = (service as any).extractSuggestionsFromHits( + hits, + 'test', + trending_hashtags, + 3 + ); - expect(elasticsearch_service.search).toHaveBeenCalledWith({ - index: ELASTICSEARCH_INDICES.TWEETS, - body: expect.objectContaining({ - query: { - bool: { - must: [], - should: expect.any(Array), - filter: [ - { - term: { - username: 'alyaa242', - }, - }, - ], - }, + expect(result).toHaveLength(0); + }); + + it('should return empty array when query not found in text', () => { + const hits = [ + { + _source: { + content: 'This is a post about something completely different', }, - }), - }); + }, + ]; - expect(result.data).toHaveLength(1); - expect(result.data[0].user.username).toBe('alyaa242'); + const result = (service as any).extractSuggestionsFromHits( + hits, + 'javascript', + trending_hashtags, + 3 + ); + + expect(result).toHaveLength(0); }); - }); - describe('encodeCursor', () => { - it('should encode sort array to base64 cursor', () => { - const sort = [1.5, 100, '0c059899-f706-4c8f-97d7-ba2e9fc22d6d']; - const result = service['encodeCursor'](sort); + it('should skip completion when length is less than query + 3', () => { + const hits = [ + { + _source: { + content: 'test a', + }, + }, + ]; - expect(result).toBeTruthy(); - expect(typeof result).toBe('string'); + const result = (service as any).extractSuggestionsFromHits( + hits, + 'test', + trending_hashtags, + 3 + ); - const decoded = JSON.parse(Buffer.from(result as any, 'base64').toString('utf8')); - expect(decoded).toEqual(sort); + expect(result).toHaveLength(0); }); - it('should return null when sort is undefined', () => { - const result = service['encodeCursor'](undefined); + it('should skip completion when it does not start with query', () => { + const hits = [ + { + _source: { + content: 'prefix test something else that is different', + }, + }, + ]; + + const result = (service as any).extractSuggestionsFromHits( + hits, + 'blah', + trending_hashtags, + 3 + ); - expect(result).toBeNull(); + expect(result).toHaveLength(0); }); - it('should return null when sort is null', () => { - const result = service['encodeCursor'](null as any); + it('should skip completion when middle content contains punctuation', () => { + const hits = [ + { + _source: { + content: 'test!', + }, + }, + ]; + + const result = (service as any).extractSuggestionsFromHits( + hits, + 'test', + trending_hashtags, + 3 + ); - expect(result).toBeNull(); + expect(result).toHaveLength(0); }); - }); - describe('decodeCursor', () => { - it('should decode base64 cursor to sort array', () => { - const sort = [1.5, 100, '0c059899-f706-4c8f-97d7-ba2e9fc22d6d']; - const cursor = Buffer.from(JSON.stringify(sort)).toString('base64'); + it('should extract valid completion from content', () => { + const hits = [ + { + _source: { + content: 'javascript is amazing for web development. Other stuff.', + }, + }, + ]; - const result = service['decodeCursor'](cursor); + const result = (service as any).extractSuggestionsFromHits( + hits, + 'javascript', + trending_hashtags, + 3 + ); - expect(result).toEqual(sort); + expect(result).toHaveLength(1); + expect(result[0].query).toBe('javascript is amazing for web development'); + expect(result[0].is_trending).toBe(false); }); - it('should return null when cursor is null', () => { - const result = service['decodeCursor'](null); + it('should extract completion from highlighted content', () => { + const hits = [ + { + _source: { + content: 'javascript is great', + }, + highlight: { + content: ['javascript is awesome for coding'], + }, + }, + ]; + + const result = (service as any).extractSuggestionsFromHits( + hits, + 'javascript', + trending_hashtags, + 3 + ); - expect(result).toBeNull(); + expect(result).toHaveLength(1); + expect(result[0].query).toBe('javascript is awesome for coding'); }); - it('should return null when cursor is invalid base64', () => { - const result = service['decodeCursor']('invalid-cursor'); + it('should remove MARK tags from highlighted content', () => { + const hits = [ + { + highlight: { + content: ['test content with marks'], + }, + }, + ]; + + const result = (service as any).extractSuggestionsFromHits( + hits, + 'test', + trending_hashtags, + 3 + ); - expect(result).toBeNull(); + expect(result).toHaveLength(1); + expect(result[0].query).not.toContain(''); + expect(result[0].query).not.toContain(''); }); - it('should return null when cursor is not valid JSON', () => { - const invalid_cursor = Buffer.from('not-json').toString('base64'); + it('should trim and remove trailing punctuation from completion', () => { + const hits = [ + { + _source: { + content: 'javascript is amazing for development,,,', + }, + }, + ]; - const result = service['decodeCursor'](invalid_cursor); + const result = (service as any).extractSuggestionsFromHits( + hits, + 'javascript', + trending_hashtags, + 3 + ); - expect(result).toBeNull(); + expect(result).toHaveLength(1); + expect(result[0].query).toBe('javascript is amazing for development'); }); - }); - describe('applyTweetsBoosting', () => { - it('should add boosting queries to search body', () => { - const search_body = { - query: { - bool: { - must: [], - should: [], + it('should limit completion length to sentence end', () => { + const hits = [ + { + _source: { + content: 'javascript is great. This is another sentence.', }, }, - }; + ]; - service['applyTweetsBoosting'](search_body); + const result = (service as any).extractSuggestionsFromHits( + hits, + 'javascript', + trending_hashtags, + 3 + ); + + expect(result).toHaveLength(1); + expect(result[0].query).toBe('javascript is great'); + }); - expect(search_body.query.bool.should).toHaveLength(6); - expect(search_body.query.bool.should).toContainEqual({ - function_score: { - field_value_factor: { - field: 'num_likes', - factor: 0.01, - modifier: 'log1p', - missing: 0, + it('should sort suggestions with trending first, then by length', () => { + const hits = [ + { + _source: { + hashtags: ['#javascript', '#js'], }, }, - }); - expect(search_body.query.bool.should).toContainEqual({ - function_score: { - field_value_factor: { - field: 'num_reposts', - factor: 0.02, - modifier: 'log1p', - missing: 0, + { + _source: { + hashtags: ['#ai', '#artificial'], }, }, - }); - expect(search_body.query.bool.should).toContainEqual({ - function_score: { - field_value_factor: { - field: 'followers', - factor: 0.001, - modifier: 'log1p', - missing: 0, + { + _source: { + hashtags: ['#test'], }, }, - }); + ]; + + const result = (service as any).extractSuggestionsFromHits( + hits, + '#', + trending_hashtags, + 5 + ); + + expect(result[0].is_trending).toBe(true); + expect(result[1].is_trending).toBe(true); + if (result.length > 2) { + expect(result[2].is_trending).toBe(false); + } }); - }); - describe('fetchRelatedTweets', () => { - it('should fetch parent and conversation tweets', async () => { - const tweets = [ + it('should handle case-insensitive query matching', () => { + const hits = [ { - type: 'reply', - parent_id: '0c059811-f706-4c8f-97d7-ba2e9fc22d6d', - conversation_id: '0c059822-f706-4c8f-97d7-ba2e9fc22d6d', + _source: { + content: 'JavaScript is awesome for development', + }, }, + ]; + + const result = (service as any).extractSuggestionsFromHits( + hits, + 'javascript', + trending_hashtags, + 3 + ); + + expect(result).toHaveLength(1); + expect(result[0].query).toBe('JavaScript is awesome for development'); + }); + + it('should handle hashtag query without # prefix', () => { + const hits = [ { - type: 'quote', - parent_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + _source: { + hashtags: ['#javascript', '#java'], + }, }, ]; - const mock_mget_response = { - docs: [ - { - _id: '0c059811-f706-4c8f-97d7-ba2e9fc22d6d', - found: true, - _source: { - tweet_id: '0c059811-f706-4c8f-97d7-ba2e9fc22d6d', - content: 'Parent 1', - }, - }, - { - _id: '0c059822-f706-4c8f-97d7-ba2e9fc22d6d', - found: true, - _source: { - tweet_id: '0c059822-f706-4c8f-97d7-ba2e9fc22d6d', - content: 'Conversation', - }, - }, - { - _id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - found: true, - _source: { - tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - content: 'Parent 2', - }, + const result = (service as any).extractSuggestionsFromHits( + hits, + 'java', + trending_hashtags, + 3 + ); + + expect(result).toHaveLength(1); + expect(result[0].query).toBe('#javascript'); + }); + + it('should return early when hashtag matches in loop', () => { + const hits = [ + { + _source: { + hashtags: ['#test1', '#javascript', '#test2'], }, - ], - }; + }, + ]; - elasticsearch_service.mget.mockResolvedValueOnce(mock_mget_response as any); + const result = (service as any).extractSuggestionsFromHits( + hits, + '#java', + trending_hashtags, + 3 + ); - const result = await service['fetchRelatedTweets'](tweets); + expect(result).toHaveLength(1); + expect(result[0].query).toBe('#javascript'); + }); - expect(elasticsearch_service.mget).toHaveBeenCalledWith({ - index: ELASTICSEARCH_INDICES.TWEETS, - body: { - ids: [ - '0c059811-f706-4c8f-97d7-ba2e9fc22d6d', - '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - '0c059822-f706-4c8f-97d7-ba2e9fc22d6d', - ], + it('should handle newline as sentence end', () => { + const hits = [ + { + _source: { + content: 'javascript is amazing\nNew line content', + }, }, - }); + ]; - expect(result.parent_map.size).toBe(2); - expect(result.parent_map.get('0c059811-f706-4c8f-97d7-ba2e9fc22d6d')).toEqual({ - tweet_id: '0c059811-f706-4c8f-97d7-ba2e9fc22d6d', - content: 'Parent 1', - }); - expect(result.parent_map.get('0c059899-f706-4c8f-97d7-ba2e9fc22d6d')).toEqual({ - tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - content: 'Parent 2', - }); + const result = (service as any).extractSuggestionsFromHits( + hits, + 'javascript', + trending_hashtags, + 3 + ); - expect(result.conversation_map.size).toBe(1); - expect(result.conversation_map.get('0c059822-f706-4c8f-97d7-ba2e9fc22d6d')).toEqual({ - tweet_id: '0c059822-f706-4c8f-97d7-ba2e9fc22d6d', - content: 'Conversation', - }); + expect(result).toHaveLength(1); + expect(result[0].query).toBe('javascript is amazing'); }); - it('should return empty maps when no parent or conversation ids', async () => { - const tweets = [ + it('should handle exclamation mark as sentence end', () => { + const hits = [ { - type: 'post', + _source: { + content: 'javascript is fantastic! More content here', + }, }, ]; - const result = await service['fetchRelatedTweets'](tweets); + const result = (service as any).extractSuggestionsFromHits( + hits, + 'javascript', + trending_hashtags, + 3 + ); - expect(elasticsearch_service.mget).not.toHaveBeenCalled(); - expect(result.parent_map.size).toBe(0); - expect(result.conversation_map.size).toBe(0); + expect(result).toHaveLength(1); + expect(result[0].query).toBe('javascript is fantastic'); }); - }); - describe('attachRelatedTweets', () => { - it('should attach parent and conversation tweets to items', async () => { - const items = [ + it('should handle question mark as sentence end', () => { + const hits = [ { _source: { - tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - type: 'reply', - parent_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - conversation_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - content: 'Reply content', - created_at: '2024-01-15T10:30:00Z', - updated_at: '2024-01-15T10:30:00Z', - num_likes: 5, - num_reposts: 2, - num_views: 50, - num_replies: 1, - num_quotes: 0, - author_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - username: 'alyaali', - name: 'Alyaa Ali', - avatar_url: 'https://example.com/avatar.jpg', - followers: 50, - following: 25, - images: [], - videos: [], + content: 'javascript is good? Maybe not', }, }, ]; - const mock_mget_response = { - docs: [ - { - _id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - found: true, - _source: { - tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - content: 'Parent content', - }, + const result = (service as any).extractSuggestionsFromHits( + hits, + 'javascript', + trending_hashtags, + 3 + ); + + expect(result).toHaveLength(1); + expect(result[0].query).toBe('javascript is good'); + }); + + it('should sort non-trending suggestions by length', () => { + const hits = [ + { + _source: { + content: 'test is short', }, - { - _id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - found: true, - _source: { - tweet_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - content: 'Conversation content', - }, + }, + { + _source: { + content: 'test is a very long completion', }, - ], - }; + }, + ]; - elasticsearch_service.mget.mockResolvedValueOnce(mock_mget_response as any); + const result = (service as any).extractSuggestionsFromHits(hits, 'test', new Map(), 3); + + expect(result).toHaveLength(2); + expect(result[0].query.length).toBeLessThan(result[1].query.length); + }); + + it('should handle empty hashtags array', () => { + const hits = [ + { + _source: { + hashtags: [], + content: 'test content here', + }, + }, + ]; + + const result = (service as any).extractSuggestionsFromHits( + hits, + 'test', + trending_hashtags, + 3 + ); + + expect(result).toHaveLength(1); + expect(result[0].query).toBe('test content here'); + }); + + it('should handle non-array hashtags', () => { + const hits = [ + { + _source: { + hashtags: 'not-an-array', + content: 'test content here', + }, + }, + ]; - const result = await service['attachRelatedTweets'](items); + const result = (service as any).extractSuggestionsFromHits( + hits, + 'test', + trending_hashtags, + 3 + ); expect(result).toHaveLength(1); - expect(result[0].tweet_id).toBe('0c059899-f706-4c8f-97d7-ba2e9fc22d6d'); - expect(result[0].parent_tweet).toBeDefined(); - expect(result[0].conversation_tweet).toBeDefined(); }); }); }); diff --git a/src/search/search.service.ts b/src/search/search.service.ts index ac90f77c..65911a4c 100644 --- a/src/search/search.service.ts +++ b/src/search/search.service.ts @@ -13,15 +13,18 @@ import { plainToInstance } from 'class-transformer'; import { User } from 'src/user/entities'; import { SuggestionsResponseDto } from './dto/suggestions-response.dto'; import { SuggestedUserDto } from './dto/suggested-user.dto'; -import { bool } from 'sharp'; import { TweetResponseDTO } from 'src/tweets/dto'; +import { RedisService } from 'src/redis/redis.service'; +import { TweetType } from 'src/shared/enums/tweet-types.enum'; +import { STRING_MAX_LENGTH } from 'src/constants/variables'; @Injectable() export class SearchService { constructor( private readonly elasticsearch_service: ElasticsearchService, private readonly user_repository: UserRepository, - private readonly data_source: DataSource + private readonly data_source: DataSource, + private readonly redis_service: RedisService ) {} async getSuggestions( @@ -30,36 +33,31 @@ export class SearchService { ): Promise { const { query } = query_dto; - const decoded_query = decodeURIComponent(query); - const sanitized_query = decoded_query.replace(/[^\w\s#]/gi, ''); + const sanitized_query = this.validateAndSanitizeQuery(query); - if (!sanitized_query.trim()) { + if (!sanitized_query) { return { suggested_queries: [], suggested_users: [] }; } - const prefix_query = sanitized_query - .split(/\s+/) - .filter(Boolean) - .map((term) => `${term}:*`) - .join(' & '); + const prefix_query = this.buildUserPrefixQuery(sanitized_query); let query_builder = this.user_repository.createQueryBuilder('user'); - query_builder = this.attachUserSearchQuery(query_builder, sanitized_query); + query_builder = this.attachUserSearchQuery(query_builder, prefix_query); query_builder.setParameters({ current_user_id, prefix_query, }); + const trending_hashtags: Map = await this.getTrendingHashtags(); + const [users_result, queries_result] = await Promise.all([ - query_builder - .orderBy('total_score', 'DESC') - .addOrderBy('user.id', 'ASC') - .limit(10) - .getRawMany(), + this.executeUsersSearch(query_builder, 10), - this.elasticsearch_service.search(this.buildEsSuggestionsQuery(sanitized_query)), + this.elasticsearch_service.search( + this.buildEsSuggestionsQuery(sanitized_query, trending_hashtags) + ), ]); const users_list = users_result.map((user) => @@ -69,15 +67,15 @@ export class SearchService { }) ); - const suggestions = this.extractSuggestionsFromHits(queries_result.hits.hits, query, 3); - - const suggested_queries = suggestions.map((query) => ({ + const suggestions = this.extractSuggestionsFromHits( + queries_result.hits.hits, query, - is_trending: false, - })); + trending_hashtags, + 3 + ); return { - suggested_queries: suggested_queries, + suggested_queries: suggestions, suggested_users: users_list, }; } @@ -88,72 +86,31 @@ export class SearchService { ): Promise { const { query, cursor, limit = 20, username } = query_dto; - const decoded_query = decodeURIComponent(query); - const sanitized_query = decoded_query.replace(/[^\w\s#]/gi, ''); + const sanitized_query = this.validateAndSanitizeQuery(query); - if (!sanitized_query.trim()) { - return { data: [], pagination: { next_cursor: null, has_more: false } }; + if (!sanitized_query) { + return this.createEmptyResponse(); } - const prefix_query = sanitized_query - .split(/\s+/) - .filter(Boolean) - .map((term) => `${term}:*`) - .join(' & '); + const prefix_query = this.buildUserPrefixQuery(sanitized_query); - let cursor_score: number | null = null; - let cursor_id: string | null = null; + const cursor_data = cursor ? this.decodeUsersCursor(cursor) : null; - if (cursor) { - try { - const decoded = JSON.parse(Buffer.from(cursor, 'base64').toString('utf-8')); - cursor_score = decoded.score; - cursor_id = decoded.user_id; - } catch (error) { - throw new Error('Invalid cursor'); - } - } + const cursor_score = cursor_data?.score; + const cursor_id = cursor_data?.user_id; const fetch_limit = limit + 1; let query_builder = this.user_repository.createQueryBuilder('user'); - query_builder = this.attachUserSearchQuery(query_builder, sanitized_query); + query_builder = this.attachUserSearchQuery(query_builder, prefix_query); if (username) { - query_builder.andWhere(`EXISTS ( - SELECT 1 FROM "user" target_user - WHERE target_user.username = :username - AND ( - EXISTS ( - SELECT 1 FROM user_follows uf1 - WHERE uf1.follower_id = "user".id - AND uf1.followed_id = target_user.id - ) - OR - EXISTS ( - SELECT 1 FROM user_follows uf2 - WHERE uf2.followed_id = "user".id - AND uf2.follower_id = target_user.id - ) - ) - )`); + query_builder = this.attachUsersUsernameFilter(query_builder); } - if (cursor && cursor_score !== null && cursor_id !== null) { - query_builder.andWhere( - new Brackets((qb) => { - qb.where(`${this.getUserScoreExpression()} < :cursor_score`, { - cursor_score, - }).orWhere( - new Brackets((qb2) => { - qb2.where(`${this.getUserScoreExpression()} = :cursor_score`, { - cursor_score, - }).andWhere('"user".id > :cursor_id', { cursor_id }); - }) - ); - }) - ); + if (cursor && cursor_score && cursor_id) { + this.applyUserCursorPagination(query_builder, cursor_score, cursor_id); } query_builder.setParameters({ @@ -162,24 +119,9 @@ export class SearchService { username, }); - const results = await query_builder - .orderBy('total_score', 'DESC') - .addOrderBy('user.id', 'ASC') - .limit(fetch_limit) - .getRawMany(); - - const has_more = results.length > limit; - const users = has_more ? results.slice(0, limit) : results; + const results = await this.executeUsersSearch(query_builder, fetch_limit); - let next_cursor: string | null = null; - if (has_more && users.length > 0) { - const last_user = users[users.length - 1]; - const cursor_data = { - score: last_user.total_score, - user_id: last_user.user_id, - }; - next_cursor = Buffer.from(JSON.stringify(cursor_data)).toString('base64'); - } + const { users, has_more, next_cursor } = this.processUserPaginationResults(results, limit); const users_list = users.map((user) => plainToInstance(UserListItemDto, user, { @@ -197,371 +139,217 @@ export class SearchService { }; } - async elasticSearchUsers( + async searchPosts( current_user_id: string, - query_dto: SearchQueryDto - ): Promise { - const { query } = query_dto; + query_dto: PostsSearchDto + ): Promise { + const { query, cursor, limit = 20, has_media, username } = query_dto; - const { cursor, limit = 20 } = query_dto; + const sanitized_query = this.validateAndSanitizeQuery(query); - if (!query || query.trim().length === 0) { - return { - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }; + if (!sanitized_query) { + return this.createEmptyResponse(); } try { - const following_rows = await this.data_source.query( - `SELECT followed_id - FROM user_follows - WHERE follower_id = $1`, - [current_user_id] - ); + const search_body: any = this.buildBaseSearchBody('relevance', limit, cursor); - const following_ids = following_rows.map((row) => row.followed_id); - - const search_body: any = { - query: { - function_score: { - query: { - bool: { - must: [ - { - multi_match: { - query: query.trim(), - fields: ['username^3', 'name^2', 'bio'], - type: 'best_fields', - fuzziness: 'AUTO', - prefix_length: 1, - operator: 'or', - }, - }, - ], - filter: [], - }, - }, - functions: [ - { - filter: { - terms: { - user_id: following_ids, - }, - }, - weight: 1000000, - }, - { - field_value_factor: { - field: 'followers', - factor: 1, - modifier: 'log1p', - missing: 0, - }, - weight: 100, - }, - ], - score_mode: 'sum', - boost_mode: 'sum', - }, - }, - size: limit + 1, - sort: [{ _score: { order: 'desc' } }, { user_id: { order: 'asc' } }], - }; - - if (cursor) { - search_body.search_after = this.decodeCursor(cursor); - } + const { hashtags, remaining_text } = this.extractHashtagsAndText(sanitized_query); - const result = await this.elasticsearch_service.search({ - index: 'users', - body: search_body, - }); + this.addHashtagFilters(search_body, hashtags); - const hits = result.hits.hits; + if (remaining_text.length > 0) { + this.buildTweetsSearchQuery(search_body, remaining_text); + search_body.query.bool.minimum_should_match = 1; + } - const has_more = hits.length > limit; - const items = has_more ? hits.slice(0, limit) : hits; + const trending_hashtags: Map = await this.getTrendingHashtags(); - let next_cursor: string | null = null; + this.applyTweetsBoosting(search_body, trending_hashtags); - if (has_more) { - const last_hit = hits[limit - 1]; - next_cursor = this.encodeCursor(last_hit.sort) ?? null; + if (has_media) { + this.addMediaFilter(search_body); } - const users = items.map((hit: any) => ({ - user_id: hit._source.user_id, - username: hit._source.username, - name: hit._source.name, - bio: hit._source.bio, - country: hit._source.country, - followers: hit._source.followers, - following: hit._source.following, - verified: hit._source.verified, - avatar_url: hit._source.avatar_url, - })); + if (username) { + this.addTweetsUsernameFilter(search_body, username); + } - return { - data: users, - pagination: { - next_cursor, - has_more, - }, - }; + return await this.executeTweetsSearch(search_body, current_user_id); } catch (error) { console.log(error); - return { - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }; + return this.createEmptyResponse(); } } - async searchPosts( + async searchLatestPosts( current_user_id: string, - query_dto: PostsSearchDto + query_dto: SearchQueryDto ): Promise { - const { query, cursor, limit = 20, has_media, username } = query_dto; + const { query, cursor, limit = 20, username } = query_dto; - const decoded_query = decodeURIComponent(query); - const sanitized_query = decoded_query.replace(/[^\w\s#]/gi, ''); + const sanitized_query = this.validateAndSanitizeQuery(query); - if (!sanitized_query || sanitized_query.trim().length === 0) { - return { - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }; + if (!sanitized_query) { + return this.createEmptyResponse(); } try { - const search_body: any = { - query: { - bool: { - must: [], - should: [], - minimum_should_match: 1, - }, - }, - size: limit + 1, - sort: [ - { _score: { order: 'desc' } }, - { created_at: { order: 'desc' } }, - { tweet_id: { order: 'desc' } }, - ], - }; + const search_body: any = this.buildBaseSearchBody('recency', limit, cursor); - if (cursor) { - search_body.search_after = this.decodeCursor(cursor); - } + const { hashtags, remaining_text } = this.extractHashtagsAndText(sanitized_query); - const hashtag_pattern = /#\w+/g; - const hashtags = sanitized_query.match(hashtag_pattern) || []; - const remaining_text = sanitized_query.replace(hashtag_pattern, '').trim(); - - if (hashtags.length > 0) { - hashtags.forEach((hashtag) => { - search_body.query.bool.must.push({ - term: { - hashtags: { - value: hashtag.toLowerCase(), - boost: 10, - }, - }, - }); - }); - } + this.addHashtagFilters(search_body, hashtags); if (remaining_text.length > 0) { this.buildTweetsSearchQuery(search_body, remaining_text); + search_body.query.bool.minimum_should_match = 1; } - this.applyTweetsBoosting(search_body); + const trending_hashtags: Map = await this.getTrendingHashtags(); - if (has_media) { - search_body.query.bool.filter = search_body.query.bool.filter || []; - search_body.query.bool.filter.push({ - script: { - script: { - source: "(doc['images'].size() > 0 || doc['videos'].size() > 0)", - }, - }, - }); - } + this.applyTweetsBoosting(search_body, trending_hashtags); if (username) { - search_body.query.bool.filter = search_body.query.bool.filter || []; - search_body.query.bool.filter.push({ - term: { - username, - }, - }); + this.addTweetsUsernameFilter(search_body, username); } - const result = await this.elasticsearch_service.search({ - index: ELASTICSEARCH_INDICES.TWEETS, - body: search_body, - }); + return await this.executeTweetsSearch(search_body, current_user_id); + } catch (error) { + console.log(error); + return this.createEmptyResponse(); + } + } - const hits = result.hits.hits; + async getMentionSuggestions( + current_user_id: string, + query_dto: BasicQueryDto + ): Promise { + const { query } = query_dto; - const has_more = hits.length > limit; - const items = has_more ? hits.slice(0, limit) : hits; + const sanitized_query = this.validateAndSanitizeQuery(query); - let next_cursor: string | null = null; + if (!sanitized_query) { + return []; + } - if (has_more) { - const last_hit = hits[limit - 1]; - next_cursor = this.encodeCursor(last_hit.sort) ?? null; - } + const prefix_query = this.buildUserPrefixQuery(sanitized_query); - const mapped_tweets = await this.attachRelatedTweets(items); + let query_builder = this.user_repository.createQueryBuilder('user'); - const tweets_with_interactions = await this.attachUserInteractions( - mapped_tweets, - current_user_id - ); + query_builder = this.attachUserSearchQuery(query_builder, prefix_query); - return { - data: tweets_with_interactions, - pagination: { - next_cursor, - has_more, - }, - }; - } catch (error) { - console.log(error); - return { - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }; - } - } + query_builder.setParameters({ + current_user_id, + prefix_query, + }); - async searchLatestPosts( - current_user_id: string, - query_dto: SearchQueryDto - ): Promise { - const { query, cursor, limit = 20, username } = query_dto; + const users_result = await this.executeUsersSearch(query_builder, 10); + + const users_list = users_result.map((user) => + plainToInstance(SuggestedUserDto, user, { + enableImplicitConversion: true, + excludeExtraneousValues: true, + }) + ); + return users_list; + } + + private validateAndSanitizeQuery(query: string): string | null { const decoded_query = decodeURIComponent(query); - const sanitized_query = decoded_query.replace(/[^\w\s#]/gi, ''); + const sanitized_query = decoded_query.replaceAll(/[^\p{L}\p{N}\s#\s_]/gu, ''); if (!sanitized_query || sanitized_query.trim().length === 0) { - return { - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }; + return null; } - try { - const search_body: any = { - query: { - bool: { - must: [], - should: [], - }, - }, - size: limit + 1, - sort: [ - { created_at: { order: 'desc' } }, - { _score: { order: 'desc' } }, - { tweet_id: { order: 'desc' } }, - ], - }; - - if (cursor) { - search_body.search_after = this.decodeCursor(cursor); - } - - const hashtag_pattern = /#\w+/g; - const hashtags = sanitized_query.match(hashtag_pattern) || []; - const remaining_text = sanitized_query.replace(hashtag_pattern, '').trim(); + return sanitized_query; + } - if (hashtags.length > 0) { - hashtags.forEach((hashtag) => { - search_body.query.bool.must.push({ - term: { - hashtags: { - value: hashtag.toLowerCase(), - boost: 10, - }, - }, - }); - }); - } + private createEmptyResponse(): { + data: []; + pagination: { next_cursor: string | null; has_more: boolean }; + } { + return { + data: [], + pagination: { + next_cursor: null, + has_more: false, + }, + }; + } - if (remaining_text.length > 0) { - this.buildTweetsSearchQuery(search_body, remaining_text); - } + private buildBaseSearchBody( + type: 'relevance' | 'recency', + limit: number, + cursor?: string | null + ): any { + const search_body: any = { + query: { + bool: { + must: [], + should: [], + }, + }, + size: limit + 1, + sort: + type === 'relevance' + ? [ + { _score: { order: 'desc' } }, + { created_at: { order: 'desc' } }, + { tweet_id: { order: 'desc' } }, + ] + : [ + { created_at: { order: 'desc' } }, + { _score: { order: 'desc' } }, + { tweet_id: { order: 'desc' } }, + ], + }; - this.applyTweetsBoosting(search_body); + if (cursor) { + search_body.search_after = this.decodeTweetsCursor(cursor); + } - if (username) { - search_body.query.bool.filter = search_body.query.bool.filter || []; - search_body.query.bool.filter.push({ - term: { - username, - }, - }); - } + return search_body; + } - const result = await this.elasticsearch_service.search({ - index: ELASTICSEARCH_INDICES.TWEETS, - body: search_body, - }); + private async executeTweetsSearch( + search_body: any, + current_user_id: string + ): Promise { + const result = await this.elasticsearch_service.search({ + index: ELASTICSEARCH_INDICES.TWEETS, + body: search_body, + }); - const hits = result.hits.hits; + const limit = search_body.size - 1; - const has_more = hits.length > limit; - const items = has_more ? hits.slice(0, limit) : hits; + const hits = result.hits.hits; - let next_cursor: string | null = null; + const has_more = hits.length > limit; + const items = has_more ? hits.slice(0, limit) : hits; - if (has_more) { - const last_hit = hits[limit - 1]; - next_cursor = this.encodeCursor(last_hit.sort) ?? null; - } + let next_cursor: string | null = null; - const mapped_tweets = await this.attachRelatedTweets(items); + if (has_more) { + const last_hit = hits[limit - 1]; + next_cursor = this.encodeTweetsCursor(last_hit.sort) ?? null; + } - const tweets_with_interactions = await this.attachUserInteractions( - mapped_tweets, - current_user_id - ); + const mapped_tweets = await this.attachRelatedTweets(items); + const tweets_with_interactions = await this.attachUserInteractions( + mapped_tweets, + current_user_id + ); - return { - data: tweets_with_interactions, - pagination: { - next_cursor, - has_more, - }, - }; - } catch (error) { - console.log(error); - return { - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }; - } + return { + data: tweets_with_interactions, + pagination: { + next_cursor, + has_more, + }, + }; } private mapTweet(hit: any, parent_source?: any, conversation_source?: any): TweetResponseDTO { @@ -591,6 +379,7 @@ export class SearchService { images: s.images ?? [], videos: s.videos ?? [], + mentions: s.mentions || [], }; if (parent_source) { @@ -614,6 +403,7 @@ export class SearchService { }, images: parent_source.images ?? [], videos: parent_source.videos ?? [], + mentions: parent_source.mentions ?? [], }; } @@ -638,18 +428,19 @@ export class SearchService { }, images: conversation_source.images ?? [], videos: conversation_source.videos ?? [], + mentions: parent_source.mentions ?? [], }; } return tweet; } - private encodeCursor(sort: any[] | undefined): string | null { + private encodeTweetsCursor(sort: any[] | undefined): string | null { if (!sort) return null; return Buffer.from(JSON.stringify(sort)).toString('base64'); } - private decodeCursor(cursor: string | null): any[] | null { + private decodeTweetsCursor(cursor: string | null): any[] | null { if (!cursor) return null; try { return JSON.parse(Buffer.from(cursor, 'base64').toString('utf8')); @@ -663,7 +454,14 @@ export class SearchService { { multi_match: { query: sanitized_query.trim(), - fields: ['content^3', 'username^2', 'name'], + fields: [ + 'content^3', + 'content.arabic^3', + 'username^2', + 'name', + 'name.arabic', + 'mentions^2', + ], type: 'best_fields', fuzziness: 'AUTO', prefix_length: 1, @@ -672,54 +470,136 @@ export class SearchService { }, }, { - match: { - 'content.autocomplete': { + match_phrase: { + content: { query: sanitized_query.trim(), boost: 5, }, }, }, { - prefix: { - username: { - value: sanitized_query.trim().toLowerCase(), - boost: 3, + match_phrase: { + 'content.arabic': { + query: sanitized_query.trim(), + boost: 5, }, }, }, { - match_phrase_prefix: { - name: { + match: { + 'content.autocomplete': { query: sanitized_query.trim(), boost: 2, }, }, + }, + { + match: { + 'name.autocomplete': { + query: sanitized_query.trim(), + boost: 1, + }, + }, } ); } - private applyTweetsBoosting(search_body: any): void { + private extractHashtagsAndText(sanitized_query: string): { + hashtags: string[]; + remaining_text: string; + } { + const hashtag_pattern = /#[\p{L}\p{N}_]+/gu; + const hashtags = sanitized_query.match(hashtag_pattern) || []; + const remaining_text = sanitized_query.replaceAll(hashtag_pattern, '').trim(); + + return { hashtags, remaining_text }; + } + + private addHashtagFilters(search_body: any, hashtags: string[]): void { + if (hashtags.length > 0) { + hashtags.forEach((hashtag) => { + search_body.query.bool.must.push({ + term: { + hashtags: { + value: hashtag.toLowerCase(), + boost: 10, + }, + }, + }); + }); + } + } + + private addMediaFilter(search_body: any): void { + search_body.query.function_score.query.bool.filter = + search_body.query.function_score.query.bool.filter || []; + search_body.query.function_score.query.bool.filter.push({ + script: { + script: { + source: "(doc['images'].size() > 0 || doc['videos'].size() > 0)", + }, + }, + }); + } + + private addTweetsUsernameFilter(search_body: any, username: string): void { + search_body.query.function_score.query.bool.filter = + search_body.query.function_score.query.bool.filter || []; + search_body.query.function_score.query.bool.filter.push({ + term: { + username, + }, + }); + } + + private applyTweetsBoosting(search_body: any, trending_hashtags?: Map): void { const boosting_factors = [ - { field: 'num_likes', factor: 0.01 }, - { field: 'num_reposts', factor: 0.02 }, - { field: 'num_quotes', factor: 0.02 }, - { field: 'num_replies', factor: 0.02 }, - { field: 'num_views', factor: 0.001 }, - { field: 'followers', factor: 0.001 }, + { field: 'num_likes', factor: 2 }, + { field: 'num_reposts', factor: 2.5 }, + { field: 'num_quotes', factor: 2.2 }, + { field: 'num_replies', factor: 1.5 }, + { field: 'num_views', factor: 0.1 }, + { field: 'followers', factor: 1 }, ]; - const boost_queries = boosting_factors.map(({ field, factor }) => ({ - function_score: { + const functions: any[] = [ + ...boosting_factors.map(({ field, factor }) => ({ field_value_factor: { field, factor, modifier: 'log1p', missing: 0, }, - }, - })); + })), + ]; + + if (trending_hashtags && trending_hashtags.size > 0) { + const max_score = Math.max(...Array.from(trending_hashtags.values()), 1); + + const trending_functions = Array.from(trending_hashtags.entries()).map( + ([hashtag, score]) => ({ + filter: { + term: { + hashtags: { value: hashtag }, + }, + }, + weight: 10 + (score / max_score) * 10, + }) + ); + + functions.push(...trending_functions); + } + + const original_query = { ...search_body.query }; - search_body.query.bool.should.push(...boost_queries); + search_body.query = { + function_score: { + query: original_query, + functions, + score_mode: 'sum', + boost_mode: 'sum', + }, + }; } private async attachRelatedTweets(items: any[]): Promise { @@ -797,17 +677,44 @@ export class SearchService { return tweets; } - const tweet_values = tweets + const all_tweet_user_pairs: Array<{ tweet_id: string; user_id: string; path: string }> = []; + + tweets.forEach((tweet) => { + all_tweet_user_pairs.push({ + tweet_id: tweet.tweet_id, + user_id: tweet.user?.id, + path: 'main', + }); + + if (tweet.parent_tweet) { + all_tweet_user_pairs.push({ + tweet_id: tweet.parent_tweet.tweet_id, + user_id: tweet.parent_tweet.user?.id, + path: 'parent', + }); + } + + if (tweet.conversation_tweet) { + all_tweet_user_pairs.push({ + tweet_id: tweet.conversation_tweet.tweet_id, + user_id: tweet.conversation_tweet.user?.id, + path: 'conversation', + }); + } + }); + + const tweet_values = all_tweet_user_pairs .map((_, idx) => `($${idx * 2 + 1}::uuid, $${idx * 2 + 2}::uuid)`) .join(', '); - const tweet_params_count = tweets.length * 2; + const tweet_params_count = all_tweet_user_pairs.length * 2; const liked_param = `$${tweet_params_count + 1}`; const reposted_param = `$${tweet_params_count + 2}`; - const following_param = `$${tweet_params_count + 3}`; - const follower_param = `$${tweet_params_count + 4}`; - const blocked_param = `$${tweet_params_count + 5}`; - const muted_param = `$${tweet_params_count + 6}`; + const bookmarked_param = `$${tweet_params_count + 3}`; + const following_param = `$${tweet_params_count + 4}`; + const follower_param = `$${tweet_params_count + 5}`; + const blocked_param = `$${tweet_params_count + 6}`; + const muted_param = `$${tweet_params_count + 7}`; const query = ` SELECT @@ -823,6 +730,11 @@ export class SearchService { WHERE tweet_id = t.tweet_id AND user_id = ${reposted_param}::uuid ))::int as is_reposted, + (EXISTS( + SELECT 1 FROM tweet_bookmarks + WHERE tweet_id = t.tweet_id + AND user_id = ${bookmarked_param}::uuid + ))::int as is_bookmarked, (EXISTS( SELECT 1 FROM user_follows WHERE followed_id = t.user_id @@ -846,7 +758,7 @@ export class SearchService { ) `; - const tweet_params = tweets.flatMap((t) => [t.tweet_id, t.user?.id]); + const tweet_params = all_tweet_user_pairs.flatMap((pair) => [pair.tweet_id, pair.user_id]); const params = [ ...tweet_params, current_user_id, @@ -855,6 +767,7 @@ export class SearchService { current_user_id, current_user_id, current_user_id, + current_user_id, ]; interface IInteractionResult { @@ -862,6 +775,7 @@ export class SearchService { user_id: string; is_liked: number; is_reposted: number; + is_bookmarked: number; is_following: number; is_follower: number; } @@ -874,28 +788,95 @@ export class SearchService { { is_liked: Boolean(i.is_liked), is_reposted: Boolean(i.is_reposted), + is_bookmarked: Boolean(i.is_bookmarked), is_following: Boolean(i.is_following), is_follower: Boolean(i.is_follower), }, ]) ); - const filtered_tweets = tweets.filter((tweet) => interactions_map.has(tweet.tweet_id)); + const result_tweets = tweets + .map((tweet) => { + const main_interaction = interactions_map.get(tweet.tweet_id); - return filtered_tweets.map((tweet) => { - const interaction = interactions_map.get(tweet.tweet_id); + if (!main_interaction) { + return null; + } - return { - ...tweet, - is_liked: interaction?.is_liked ?? false, - is_reposted: interaction?.is_reposted ?? false, - user: { - ...tweet.user, - is_following: interaction?.is_following ?? false, - is_follower: interaction?.is_follower ?? false, - }, - }; - }); + const parent_interaction = tweet.parent_tweet + ? interactions_map.get(tweet.parent_tweet.tweet_id) + : undefined; + + const conversation_interaction = tweet.conversation_tweet + ? interactions_map.get(tweet.conversation_tweet.tweet_id) + : undefined; + + if (tweet.type === TweetType.QUOTE && !parent_interaction) { + return null; + } + + if (tweet.type === TweetType.REPLY) { + if (!parent_interaction) { + return null; + } + if (!conversation_interaction) { + return null; + } + } + + const result: any = { + ...tweet, + is_liked: main_interaction.is_liked, + is_reposted: main_interaction.is_reposted, + is_bookmarked: main_interaction.is_bookmarked, + user: { + ...tweet.user, + is_following: main_interaction.is_following, + is_follower: main_interaction.is_follower, + }, + }; + + if (tweet.parent_tweet && parent_interaction) { + result.parent_tweet = { + ...tweet.parent_tweet, + is_liked: parent_interaction.is_liked, + is_reposted: parent_interaction.is_reposted, + is_bookmarked: parent_interaction.is_bookmarked, + user: { + ...tweet.parent_tweet.user, + is_following: parent_interaction.is_following, + is_follower: parent_interaction.is_follower, + }, + }; + } + + if (tweet.conversation_tweet && conversation_interaction) { + result.conversation_tweet = { + ...tweet.conversation_tweet, + is_liked: conversation_interaction.is_liked, + is_reposted: conversation_interaction.is_reposted, + is_bookmarked: conversation_interaction.is_bookmarked, + user: { + ...tweet.conversation_tweet.user, + is_following: conversation_interaction.is_following, + is_follower: conversation_interaction.is_follower, + }, + }; + } + + return result; + }) + .filter((tweet) => tweet !== null); + + return result_tweets; + } + + private buildUserPrefixQuery(sanitized_query: string): string { + return sanitized_query + .split(/\s+/) + .filter(Boolean) + .map((term) => `${term}:*`) + .join(' & '); } private attachUserSearchQuery( @@ -957,45 +938,143 @@ export class SearchService { private getUserScoreExpression(): string { return ` - (COALESCE(uf_following.boost, 0)) - + - (ts_rank("user".search_vector, to_tsquery('simple', :prefix_query)) * 1000) - + - (LOG(GREATEST("user".followers, 1) + 1) * 100) + (COALESCE(uf_following.boost, 0)) + + + (ts_rank("user".search_vector, to_tsquery('simple', :prefix_query)) * 1000) + + + (LOG(GREATEST("user".followers, 1) + 1) * 100) `; } - private buildEsSuggestionsQuery(sanitized_query: string) { + private decodeUsersCursor(cursor: string): { score: number; user_id: string } { + try { + const decoded = JSON.parse(Buffer.from(cursor, 'base64').toString('utf-8')); + return { + score: decoded.score, + user_id: decoded.user_id, + }; + } catch (error) { + throw new Error('Invalid cursor'); + } + } + + private encodeUsersCursor(score: number, user_id: string): string { + const cursor_data = { score, user_id }; + return Buffer.from(JSON.stringify(cursor_data)).toString('base64'); + } + + private async executeUsersSearch( + query_builder: SelectQueryBuilder, + fetch_limit: number + ): Promise { + return await query_builder + .orderBy('total_score', 'DESC') + .addOrderBy('user.id', 'ASC') + .limit(fetch_limit) + .getRawMany(); + } + + private processUserPaginationResults( + results: any[], + limit: number + ): { + users: any[]; + has_more: boolean; + next_cursor: string | null; + } { + const has_more = results.length > limit; + const users = has_more ? results.slice(0, limit) : results; + + let next_cursor: string | null = null; + if (has_more && users.length > 0) { + const last_user = users[users.length - 1]; + next_cursor = this.encodeUsersCursor(last_user.total_score, last_user.user_id); + } + + return { users, has_more, next_cursor }; + } + + private applyUserCursorPagination( + query_builder: SelectQueryBuilder, + cursor_score: number, + cursor_id: string + ): void { + query_builder.andWhere( + new Brackets((qb) => { + qb.where(`${this.getUserScoreExpression()} < :cursor_score`, { + cursor_score, + }).orWhere( + new Brackets((qb2) => { + qb2.where(`${this.getUserScoreExpression()} = :cursor_score`, { + cursor_score, + }).andWhere('"user".id > :cursor_id', { cursor_id }); + }) + ); + }) + ); + } + + private attachUsersUsernameFilter( + query_builder: SelectQueryBuilder + ): SelectQueryBuilder { + query_builder.andWhere(`EXISTS ( + SELECT 1 FROM "user" target_user + WHERE target_user.username = :username + AND ( + EXISTS ( + SELECT 1 FROM user_follows uf1 + WHERE uf1.follower_id = "user".id + AND uf1.followed_id = target_user.id + ) + OR + EXISTS ( + SELECT 1 FROM user_follows uf2 + WHERE uf2.followed_id = "user".id + AND uf2.follower_id = target_user.id + ) + ) + )`); + + return query_builder; + } + + private buildEsSuggestionsQuery( + sanitized_query: string, + trending_hashtags: Map + ) { const is_hashtag = sanitized_query.startsWith('#'); const search_body = { index: 'tweets', size: 20, - _source: ['content'], + _source: ['content', 'hashtags'], query: { bool: { should: [ - ...(!is_hashtag - ? [ + { + prefix: { + hashtags: { + value: is_hashtag + ? sanitized_query.toLowerCase() + : `#${sanitized_query.toLowerCase()}`, + boost: 3, + }, + }, + }, + + ...(is_hashtag + ? [] + : [ { - prefix: { - hashtags: { - value: `#${sanitized_query.toLowerCase()}`, + match: { + 'content.autocomplete': { + query: sanitized_query, boost: 3, + operator: 'and', }, }, }, - ] - : []), - { - match_phrase_prefix: { - content: { - query: sanitized_query, - slop: 0, - boost: 2, - }, - }, - }, + ]), ], minimum_should_match: 1, }, @@ -1013,73 +1092,101 @@ export class SearchService { }, }; - this.applyTweetsBoosting(search_body); + this.applyTweetsBoosting(search_body, trending_hashtags); return search_body; } - private extractSuggestionsFromHits(hits: any[], query: string, max_suggestions = 3): string[] { - const suggestions = new Set(); + private extractSuggestionsFromHits( + hits: any[], + query: string, + trending_hashtags: Map, + max_suggestions = 3 + ): Array<{ query: string; is_trending: boolean }> { + const suggestions = new Map(); const query_lower = query.toLowerCase().trim(); const is_hashtag_query = query_lower.startsWith('#'); + const search_prefix = is_hashtag_query ? query_lower : `#${query_lower}`; hits.forEach((hit) => { + if (hit._source?.hashtags && Array.isArray(hit._source.hashtags)) { + for (const hashtag of hit._source.hashtags) { + if (hashtag.toLowerCase().startsWith(search_prefix)) { + const is_trending = trending_hashtags.has(hashtag.toLowerCase()); + suggestions.set(hashtag, is_trending); + return; + } + } + } + let text = hit.highlight?.content?.[0] || hit._source?.content; if (!text) return; - const text_with_marks = text; - text = text.replace(/<\/?MARK>/g, ''); + text = text.replaceAll(/<\/?MARK>/g, ''); const lower_text = text.toLowerCase(); + const query_index = lower_text.indexOf(query_lower); - const mark_index = text_with_marks.indexOf(''); - let query_index: number; - let is_hashtag = is_hashtag_query; + if (query_index === -1) return; - if (mark_index !== -1) { - const before_mark = text_with_marks.substring(0, mark_index); - const has_hash_before_mark = before_mark.endsWith('#'); + const from_query = text.substring(query_index); - if (has_hash_before_mark && !is_hashtag_query) { - is_hashtag = true; - const actual_position = before_mark.replace(/<\/?MARK>/g, '').length; - query_index = actual_position - 1; - } else { - query_index = lower_text.indexOf(query_lower); - } - } else { - query_index = lower_text.indexOf(query_lower); - } + const sentence_end_match = from_query.match(/[.!?\n]/); + const end_index = sentence_end_match + ? sentence_end_match.index + : Math.min(from_query.length, 100); + const completion = from_query + .substring(0, end_index) + .trim() + .replace(/[,;:]+$/, '') + .trim(); + + if (completion.length < query.length + 3 || completion.length > STRING_MAX_LENGTH) + return; + if (!completion.toLowerCase().startsWith(query_lower)) return; + const middle_content = completion.substring(0, completion.length - 1); + if (/[.!?]/.test(middle_content)) return; + + suggestions.set(completion, false); + }); - if (query_index === -1) return; + return Array.from(suggestions.entries()) + .sort((a, b) => { + if (a[1] !== b[1]) return a[1] ? -1 : 1; + return a[0].length - b[0].length; + }) + .slice(0, max_suggestions) + .map(([query, is_trending]) => ({ query, is_trending })); + } - const from_query = text.substring(query_index); + private async getTrendingHashtags(): Promise> { + try { + const result = await this.redis_service.zrevrange( + 'trending:global', + 0, + 29, + 'WITHSCORES' + ); + + if (!result || result.length === 0) return new Map(); + + const trending_map = new Map(); - let completion: string; - if (is_hashtag) { - const hashtag_match = from_query.match(/^#\w+/); - if (!hashtag_match) return; - completion = hashtag_match[0]; - } else { - const sentence_end_match = from_query.match(/[.!?\n]/); - const end_index = sentence_end_match - ? sentence_end_match.index - : Math.min(from_query.length, 100); - completion = from_query.substring(0, end_index).trim(); - - completion = completion.replace(/[,;:]+$/, '').trim(); - - if (completion.length < query.length + 3) return; - if (completion.length > 100) return; - if (!completion.toLowerCase().startsWith(query_lower)) return; - const middle_content = completion.substring(0, completion.length - 1); - if (/[.!?]/.test(middle_content)) return; + for (let i = 0; i < result.length; i += 2) { + const hashtag = result[i]; + const score = Number.parseFloat(result[i + 1]); + + const normalized = hashtag.toLowerCase().startsWith('#') + ? hashtag.toLowerCase() + : `#${hashtag.toLowerCase()}`; + + trending_map.set(normalized, score); } - suggestions.add(completion); - }); - return Array.from(suggestions) - .sort((a, b) => a.length - b.length) - .slice(0, max_suggestions); + return trending_map; + } catch (error) { + console.error('Error fetching trending hashtags:', error); + return new Map(); + } } } diff --git a/src/search/search.swagger.ts b/src/search/search.swagger.ts index 8b5f17b1..95929e10 100644 --- a/src/search/search.swagger.ts +++ b/src/search/search.swagger.ts @@ -436,3 +436,42 @@ export const search_latest_posts = { }, }, }; + +export const get_mention_suggestions_swagger = { + operation: { + summary: 'Get mention suggestions', + description: ` + Get relevant suggestions of people for a given query + `, + }, + + responses: { + success: { + description: 'Search suggestions retrieved successfully', + schema: { + example: { + data: [ + { + user_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + name: 'Alyaa Ali', + username: 'Alyaali242', + avatar_url: 'https://cdn.app.com/profiles/u877.jpg', + is_following: true, + is_follower: false, + }, + { + user_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + name: 'Alia Mohamed', + username: 'alyaa#222', + avatar_url: 'https://cdn.app.com/profiles/u877.jpg', + is_following: false, + is_follower: false, + }, + ], + count: 2, + message: SUCCESS_MESSAGES.SUGGESTIONS_RETRIEVED, + }, + }, + }, + }, +}; diff --git a/src/shared/services/encryption/encryption.service.ts b/src/shared/services/encryption/encryption.service.ts index 39c4f61c..1587595e 100644 --- a/src/shared/services/encryption/encryption.service.ts +++ b/src/shared/services/encryption/encryption.service.ts @@ -4,9 +4,9 @@ import * as crypto from 'crypto'; @Injectable() export class EncryptionService { - private algorithm = 'aes-256-cbc'; - private encryptionKey: Buffer; - private ivLength = 16; // Initialization vector length for AES + private readonly algorithm = 'aes-256-cbc'; + private readonly encryptionKey: Buffer; + private readonly ivLength = 16; // Initialization vector length for AES private readonly DEFAULT_ENCRYPTION_KEY = 'yapper-default-encryption-key-fallback-value-change-in-production-environment'; // Fallback for development diff --git a/src/timeline/dto/scored-candidates.dto.ts b/src/timeline/dto/scored-candidates.dto.ts index 0eccbdb7..aad304a0 100644 --- a/src/timeline/dto/scored-candidates.dto.ts +++ b/src/timeline/dto/scored-candidates.dto.ts @@ -1,4 +1,4 @@ -import { Expose, Type } from 'class-transformer'; +import { Expose } from 'class-transformer'; import { TweetResponseDTO } from 'src/tweets/dto/tweet-response.dto'; export class ScoredCandidateDTO extends TweetResponseDTO { diff --git a/src/timeline/dto/timeline-pagination.dto.ts b/src/timeline/dto/timeline-pagination.dto.ts index f5ffbff7..c72adbd7 100644 --- a/src/timeline/dto/timeline-pagination.dto.ts +++ b/src/timeline/dto/timeline-pagination.dto.ts @@ -1,6 +1,6 @@ import { ApiProperty } from '@nestjs/swagger'; import { Type } from 'class-transformer'; -import { IsBoolean, IsInt, IsOptional, IsString, Max, MaxLength, Min, MIN } from 'class-validator'; +import { IsInt, IsOptional, IsString, Max, MaxLength, Min } from 'class-validator'; import { STRING_MAX_LENGTH } from 'src/constants/variables'; export class TimelinePaginationDto { diff --git a/src/timeline/dto/timeline-response.dto.ts b/src/timeline/dto/timeline-response.dto.ts index 7902cce2..920348bb 100644 --- a/src/timeline/dto/timeline-response.dto.ts +++ b/src/timeline/dto/timeline-response.dto.ts @@ -1,6 +1,5 @@ import { ApiProperty } from '@nestjs/swagger'; -import { Tweet } from 'src/tweets/entities'; -import { TweetResponseDTO, TweetsListResponseDTO } from 'src/tweets/dto'; +import { TweetResponseDTO } from 'src/tweets/dto'; export class TimelineResponseDto { @ApiProperty({ diff --git a/src/timeline/services/foryou/canditate-sources/interests-source.ts b/src/timeline/services/foryou/canditate-sources/interests-source.ts index 2fea31e4..b3b1bf9b 100644 --- a/src/timeline/services/foryou/canditate-sources/interests-source.ts +++ b/src/timeline/services/foryou/canditate-sources/interests-source.ts @@ -3,18 +3,16 @@ import { InjectRepository } from '@nestjs/typeorm'; import { plainToInstance } from 'class-transformer'; import { PaginationService } from 'src/shared/services/pagination/pagination.service'; import { ScoredCandidateDTO } from 'src/timeline/dto/scored-candidates.dto'; -import { Tweet } from 'src/tweets/entities'; import { UserPostsView } from 'src/tweets/entities/user-posts-view.entity'; import { TweetsRepository } from 'src/tweets/tweets.repository'; -import { UserInterests } from 'src/user/entities/user-interests.entity'; -import { Brackets, QueryResult, Repository, SelectQueryBuilder } from 'typeorm'; +import { Repository } from 'typeorm'; @Injectable() export class InterestsCandidateSource { constructor( private readonly tweet_repository: TweetsRepository, @InjectRepository(UserPostsView) - private user_posts_view_repository: Repository, + private readonly user_posts_view_repository: Repository, private readonly paginate_service: PaginationService ) {} @@ -115,7 +113,7 @@ export class InterestsCandidateSource { .setParameters(cte_query.getParameters()) .setParameter('user_id', user_id) .orderBy('ranked.post_date', 'DESC') - .addOrderBy('ranked.tweet_id', 'DESC') + .addOrderBy('ranked.id', 'DESC') .limit(limit); query = this.tweet_repository.attachUserInteractionBooleanFlags( @@ -138,10 +136,9 @@ export class InterestsCandidateSource { ); let interset_tweets = await query.getRawMany(); - console.log(interset_tweets); + // console.log(interset_tweets); if (interset_tweets.length === 0) { - console.log('no interest tweets, fetching random tweets'); query = this.user_posts_view_repository.manager .createQueryBuilder() .addCommonTableExpression(cte_query.getQuery(), 'filtered_tweets') @@ -202,7 +199,7 @@ export class InterestsCandidateSource { .setParameter('user_id', user_id) .orderBy('RANDOM()') .addOrderBy('ranked.post_date', 'DESC') - .addOrderBy('ranked.tweet_id', 'DESC') + .addOrderBy('ranked.id', 'DESC') .limit(limit); query = this.tweet_repository.attachUserInteractionBooleanFlags( diff --git a/src/timeline/services/foryou/for-you.service.spec.ts b/src/timeline/services/foryou/for-you.service.spec.ts index 53734152..003cc28a 100644 --- a/src/timeline/services/foryou/for-you.service.spec.ts +++ b/src/timeline/services/foryou/for-you.service.spec.ts @@ -1,17 +1,28 @@ import { Test, TestingModule } from '@nestjs/testing'; -import { InterestsCandidateSource } from './canditate-sources/interests-source'; -import { ScoredCandidateDTO } from 'src/timeline/dto/scored-candidates.dto'; import { ForyouService } from './for-you.service'; +import { getRepositoryToken } from '@nestjs/typeorm'; +import { ConfigService } from '@nestjs/config'; +import { Repository } from 'typeorm'; +import { UserTimelineCursor } from 'src/user/entities/user-timeline-cursor.entity'; +import { TimelineRedisService } from '../timeline-redis.service'; +import { TweetsRepository } from 'src/tweets/tweets.repository'; +import { RefillTimelineQueueJobService } from 'src/background-jobs/timeline/timeline.service'; +import { TimelineCandidatesService } from '../timeline-candidates.service'; +import { TweetResponseDTO } from 'src/tweets/dto'; describe('ForyouService', () => { let service: ForyouService; - let interest_source: jest.Mocked; + let timeline_cursor_repository: jest.Mocked>; + let timeline_redis_service: jest.Mocked; + let tweets_repository: jest.Mocked; + let refill_queue_service: jest.Mocked; + let timeline_candidates_service: jest.Mocked; + let config_service: jest.Mocked; const mock_user_id = 'user-123'; - const mock_cursor = 'cursor-abc'; const mock_limit = 20; - const mock_scored_candidate: ScoredCandidateDTO = { + const mock_tweet: TweetResponseDTO = { tweet_id: 'tweet-1', profile_user_id: 'profile-1', tweet_author_id: 'author-1', @@ -42,28 +53,69 @@ describe('ForyouService', () => { }, } as any; - const mock_interest_source_response = { - data: [mock_scored_candidate], - pagination: { - next_cursor: 'next-cursor-123', - has_more: true, - }, - }; + let mock_cursor: UserTimelineCursor; beforeEach(async () => { - const mock_interest_source_provider = { - provide: InterestsCandidateSource, - useValue: { - getCandidates: jest.fn().mockResolvedValue(mock_interest_source_response), - }, - }; + // Reset mock cursor for each test + mock_cursor = { + user_id: mock_user_id, + last_fetched_tweet_id: null, + last_fetched_position: 0, + last_updated_at: new Date(), + } as UserTimelineCursor; const module: TestingModule = await Test.createTestingModule({ - providers: [ForyouService, mock_interest_source_provider], + providers: [ + ForyouService, + { + provide: getRepositoryToken(UserTimelineCursor), + useValue: { + findOne: jest.fn(), + create: jest.fn(), + save: jest.fn(), + }, + }, + { + provide: TimelineRedisService, + useValue: { + getFromQueue: jest.fn(), + getQueueSize: jest.fn(), + }, + }, + { + provide: TweetsRepository, + useValue: { + getTweetsByIds: jest.fn(), + }, + }, + { + provide: RefillTimelineQueueJobService, + useValue: { + queueRefillTimelineQueue: jest.fn(), + }, + }, + { + provide: ConfigService, + useValue: { + get: jest.fn().mockReturnValue(20), + }, + }, + { + provide: TimelineCandidatesService, + useValue: { + getCandidates: jest.fn(), + }, + }, + ], }).compile(); service = module.get(ForyouService); - interest_source = module.get(InterestsCandidateSource); + timeline_cursor_repository = module.get(getRepositoryToken(UserTimelineCursor)); + timeline_redis_service = module.get(TimelineRedisService); + tweets_repository = module.get(TweetsRepository); + refill_queue_service = module.get(RefillTimelineQueueJobService); + timeline_candidates_service = module.get(TimelineCandidatesService); + config_service = module.get(ConfigService); }); afterEach(() => { @@ -75,139 +127,208 @@ describe('ForyouService', () => { }); describe('getForyouTimeline', () => { - it('should call interest source with correct parameters', async () => { - await service.getForyouTimeline(mock_user_id, mock_cursor, mock_limit); + it('should create new cursor if not exists', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(null); + timeline_cursor_repository.create.mockReturnValue(mock_cursor); + timeline_cursor_repository.save.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + timeline_redis_service.getQueueSize.mockResolvedValue(100); - expect(interest_source.getCandidates).toHaveBeenCalledWith( + await service.getForyouTimeline(mock_user_id); + + expect(timeline_cursor_repository.create).toHaveBeenCalledWith({ + user_id: mock_user_id, + last_fetched_tweet_id: null, + last_fetched_position: 0, + }); + expect(timeline_cursor_repository.save).toHaveBeenCalled(); + }); + + it('should use existing cursor if found', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + timeline_redis_service.getQueueSize.mockResolvedValue(100); + + await service.getForyouTimeline(mock_user_id); + + expect(timeline_cursor_repository.create).not.toHaveBeenCalled(); + expect(timeline_redis_service.getFromQueue).toHaveBeenCalledWith(mock_user_id, 0, 20); + }); + + it('should fetch tweets from Redis queue', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + { tweet_id: 'tweet-2', created_at: '2024-01-02' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + timeline_redis_service.getQueueSize.mockResolvedValue(100); + + const result = await service.getForyouTimeline(mock_user_id); + + expect(timeline_redis_service.getFromQueue).toHaveBeenCalledWith( mock_user_id, - mock_cursor, + 0, mock_limit ); - expect(interest_source.getCandidates).toHaveBeenCalledTimes(1); + expect(tweets_repository.getTweetsByIds).toHaveBeenCalledWith( + ['tweet-1', 'tweet-2'], + mock_user_id + ); }); - it('should return data from interest source', async () => { - const result = await service.getForyouTimeline(mock_user_id, mock_cursor, mock_limit); - - expect(result).toEqual(mock_interest_source_response); - expect(result.data).toEqual(mock_interest_source_response.data); - expect(result.data.length).toBe(1); - expect(result.data[0]).toEqual(mock_scored_candidate); - }); + it('should update cursor position after fetching', async () => { + const updated_cursor = { ...mock_cursor, last_fetched_position: 20 }; + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_cursor_repository.save.mockResolvedValue(updated_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + timeline_redis_service.getQueueSize.mockResolvedValue(100); - it('should return correct pagination from interest source', async () => { - const result = await service.getForyouTimeline(mock_user_id, mock_cursor, mock_limit); + await service.getForyouTimeline(mock_user_id); - expect(result.pagination).toEqual(mock_interest_source_response.pagination); - expect(result.pagination.next_cursor).toBe('next-cursor-123'); - expect(result.pagination.has_more).toBe(true); + expect(timeline_cursor_repository.save).toHaveBeenCalledWith( + expect.objectContaining({ + last_fetched_tweet_id: 'tweet-1', + last_fetched_position: 1, + }) + ); }); - it('should use default limit of 20 when not provided', async () => { - await service.getForyouTimeline(mock_user_id, mock_cursor); + it('should use fallback when queue is empty', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([]); + timeline_candidates_service.getCandidates.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: new Date(), category_id: 1, score: 10 }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + + const result = await service.getForyouTimeline(mock_user_id); - expect(interest_source.getCandidates).toHaveBeenCalledWith( + expect(timeline_candidates_service.getCandidates).toHaveBeenCalledWith( mock_user_id, - mock_cursor, - 20 + expect.any(Set), + mock_limit ); + expect(result.data).toEqual([mock_tweet]); + }); + + it('should return empty when queue and fallback are empty', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([]); + timeline_candidates_service.getCandidates.mockResolvedValue([]); + + const result = await service.getForyouTimeline(mock_user_id); + + expect(result.data).toEqual([]); + expect(result.pagination.has_more).toBe(false); }); - it('should work without cursor parameter', async () => { + it('should queue refill job after fetching', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + timeline_redis_service.getQueueSize.mockResolvedValue(100); + await service.getForyouTimeline(mock_user_id); - expect(interest_source.getCandidates).toHaveBeenCalledWith(mock_user_id, undefined, 20); + expect(refill_queue_service.queueRefillTimelineQueue).toHaveBeenCalledWith({ + user_id: mock_user_id, + refill_count: 20, + }); }); - it('should work with only user_id parameter', async () => { + it('should correctly calculate has_more based on queue size', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + timeline_redis_service.getQueueSize.mockResolvedValue(25); + const result = await service.getForyouTimeline(mock_user_id); - expect(result).toEqual(mock_interest_source_response); - expect(interest_source.getCandidates).toHaveBeenCalledWith(mock_user_id, undefined, 20); + expect(result.pagination.has_more).toBe(true); }); - it('should handle empty data from interest source', async () => { - const empty_response = { - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }; - interest_source.getCandidates.mockResolvedValue(empty_response); + it('should return has_more false when at end of queue', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + timeline_redis_service.getQueueSize.mockResolvedValue(1); - const result = await service.getForyouTimeline(mock_user_id, mock_cursor, mock_limit); + const result = await service.getForyouTimeline(mock_user_id); - expect(result.data).toEqual([]); - expect(result.pagination.next_cursor).toBeNull(); expect(result.pagination.has_more).toBe(false); }); - it('should handle multiple scored candidates', async () => { - const multiple_candidates = { - data: [ - mock_scored_candidate, - { ...mock_scored_candidate, tweet_id: 'tweet-2' }, - { ...mock_scored_candidate, tweet_id: 'tweet-3' }, - ], - pagination: { - next_cursor: 'next-cursor-456', - has_more: true, - }, - }; - interest_source.getCandidates.mockResolvedValue(multiple_candidates); + it('should use default limit of 20 when not provided', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + timeline_redis_service.getQueueSize.mockResolvedValue(100); - const result = await service.getForyouTimeline(mock_user_id, mock_cursor, mock_limit); + await service.getForyouTimeline(mock_user_id); - expect(result.data.length).toBe(3); - expect(result.data[0].tweet_id).toBe('tweet-1'); - expect(result.data[1].tweet_id).toBe('tweet-2'); - expect(result.data[2].tweet_id).toBe('tweet-3'); + expect(timeline_redis_service.getFromQueue).toHaveBeenCalledWith(mock_user_id, 0, 20); }); it('should handle custom limit values', async () => { const custom_limit = 50; - await service.getForyouTimeline(mock_user_id, mock_cursor, custom_limit); + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + timeline_redis_service.getQueueSize.mockResolvedValue(100); + + await service.getForyouTimeline(mock_user_id, undefined, custom_limit); - expect(interest_source.getCandidates).toHaveBeenCalledWith( + expect(timeline_redis_service.getFromQueue).toHaveBeenCalledWith( mock_user_id, - mock_cursor, + 0, custom_limit ); }); - it('should propagate errors from interest source', async () => { - const error = new Error('Database connection failed'); - interest_source.getCandidates.mockRejectedValue(error); + it('should filter out null tweets', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + { tweet_id: 'tweet-2', created_at: '2024-01-02' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet, null as any]); + timeline_redis_service.getQueueSize.mockResolvedValue(100); - await expect( - service.getForyouTimeline(mock_user_id, mock_cursor, mock_limit) - ).rejects.toThrow('Database connection failed'); - }); - - it('should handle null cursor correctly', async () => { - await service.getForyouTimeline(mock_user_id, null as any); + const result = await service.getForyouTimeline(mock_user_id); - expect(interest_source.getCandidates).toHaveBeenCalledWith(mock_user_id, null, 20); + expect(result.data.length).toBe(1); + expect(result.data[0]).toEqual(mock_tweet); }); - it('should preserve pagination has_more flag when false', async () => { - const response_with_no_more = { - data: [mock_scored_candidate], - pagination: { - next_cursor: null, - has_more: false, - }, - }; - interest_source.getCandidates.mockResolvedValue(response_with_no_more); - - const result = await service.getForyouTimeline(mock_user_id, mock_cursor, mock_limit); + it('should return correct structure', async () => { + timeline_cursor_repository.findOne.mockResolvedValue(mock_cursor); + timeline_redis_service.getFromQueue.mockResolvedValue([ + { tweet_id: 'tweet-1', created_at: '2024-01-01' }, + ]); + tweets_repository.getTweetsByIds.mockResolvedValue([mock_tweet]); + timeline_redis_service.getQueueSize.mockResolvedValue(100); - expect(result.pagination.has_more).toBe(false); - }); - - it('should return the exact structure from interest source', async () => { - const result = await service.getForyouTimeline(mock_user_id, mock_cursor, mock_limit); + const result = await service.getForyouTimeline(mock_user_id); expect(result).toHaveProperty('data'); expect(result).toHaveProperty('pagination'); diff --git a/src/timeline/services/foryou/for-you.service.ts b/src/timeline/services/foryou/for-you.service.ts index 0f0c07e0..7a43b330 100644 --- a/src/timeline/services/foryou/for-you.service.ts +++ b/src/timeline/services/foryou/for-you.service.ts @@ -1,29 +1,132 @@ import { Injectable } from '@nestjs/common'; -import { InterestsCandidateSource } from './canditate-sources/interests-source'; -import { ScoredCandidateDTO } from 'src/timeline/dto/scored-candidates.dto'; +import { InjectRepository } from '@nestjs/typeorm'; +import { ConfigService } from '@nestjs/config'; +import { Repository } from 'typeorm'; +import { UserTimelineCursor } from 'src/user/entities/user-timeline-cursor.entity'; +import { TimelineRedisService } from '../timeline-redis.service'; +import { TweetsRepository } from 'src/tweets/tweets.repository'; +import { RefillTimelineQueueJobService } from 'src/background-jobs/timeline/timeline.service'; +import { TweetResponseDTO } from 'src/tweets/dto'; +import { TimelineCandidatesService } from '../timeline-candidates.service'; + @Injectable() export class ForyouService { - constructor(private readonly interest_source: InterestsCandidateSource) {} + private readonly refill_batch_size: number; + + constructor( + @InjectRepository(UserTimelineCursor) + private readonly timeline_cursor_repository: Repository, + private readonly timeline_redis_service: TimelineRedisService, + private readonly tweets_repository: TweetsRepository, + private readonly refill_queue_job_service: RefillTimelineQueueJobService, + private readonly config_service: ConfigService, + private readonly timeline_candidates_service: TimelineCandidatesService + ) { + this.refill_batch_size = this.config_service.get('TIMELINE_REFILL_BATCH_SIZE', 20); + } async getForyouTimeline( user_id: string, - cursor?: string, + cursor?: string, // Keep for API compatibility but not used limit: number = 20 ): Promise<{ - data: ScoredCandidateDTO[]; + data: TweetResponseDTO[]; pagination: { next_cursor: string | null; has_more: boolean }; }> { - const { data: interest_tweets, pagination } = await this.interest_source.getCandidates( + let timeline_cursor = await this.timeline_cursor_repository.findOne({ + where: { user_id }, + }); + + if (!timeline_cursor) { + console.log(`[ForYou API] No cursor found, creating new one for user ${user_id}`); + timeline_cursor = this.timeline_cursor_repository.create({ + user_id, + last_fetched_tweet_id: null, + last_fetched_position: 0, + }); + await this.timeline_cursor_repository.save(timeline_cursor); + } else { + // console.log(`[ForYou API] Found cursor for user ${user_id}, last tweet: ${timeline_cursor.last_fetched_tweet_id}`); + } + + const start_index = timeline_cursor.last_fetched_position || 0; + // console.log(`[ForYou API] Starting from position ${start_index} in queue`); + + const redis_tweets = await this.timeline_redis_service.getFromQueue( user_id, - cursor, + start_index, limit ); - // apply final combined cursor from each source + if (redis_tweets.length === 0) { + console.log( + `[ForYou API] No tweets found in Redis queue for user ${user_id} - using direct fallback` + ); + + // Fallback: Fetch tweets directly from candidates service + const candidates = await this.timeline_candidates_service.getCandidates( + user_id, + new Set(), // No exclusions for fresh start + limit + ); + + if (candidates.length === 0) { + console.log(`[ForYou API] No candidates found either, returning empty`); + return { + data: [], + pagination: { next_cursor: null, has_more: false }, + }; + } + + const candidate_tweet_ids = candidates.map((c) => c.tweet_id); + const fallback_tweets = await this.tweets_repository.getTweetsByIds( + candidate_tweet_ids, + user_id + ); + return { + data: fallback_tweets, + pagination: { next_cursor: 'next', has_more: true }, + }; + } + + const tweet_ids = redis_tweets.map((t) => t.tweet_id); + // console.log(`[ForYou API] Fetching ${tweet_ids.length} tweets from DB, IDs:`, tweet_ids.slice(0, 3)); + const tweets = await this.tweets_repository.getTweetsByIds(tweet_ids, user_id); + + // Filter out tweets from blocked/muted users + const filtered_tweets = tweets.filter((tweet) => { + // The query should already handle blocked/muted, but double-check + return tweet !== null; + }); + + // Update cursor position + if (redis_tweets.length > 0) { + const last_redis_tweet = redis_tweets[redis_tweets.length - 1]; + const new_position = start_index + redis_tweets.length; + const previous_position = timeline_cursor.last_fetched_position; + + timeline_cursor.last_fetched_tweet_id = last_redis_tweet.tweet_id; + timeline_cursor.last_fetched_position = new_position; + timeline_cursor.last_updated_at = new Date(); + await this.timeline_cursor_repository.save(timeline_cursor); + } + + // background job to refill queue + const refill_count = Math.max(limit, this.refill_batch_size); + await this.refill_queue_job_service.queueRefillTimelineQueue({ + user_id, + refill_count, + }); + // Check if there are more tweets available + const remaining_size = await this.timeline_redis_service.getQueueSize(user_id); + const has_more = remaining_size > start_index + redis_tweets.length; return { - data: interest_tweets, - pagination, + data: filtered_tweets, + pagination: { + next_cursor: has_more ? 'next' : null, // Dummy cursor for compatibility + has_more, + }, }; } } diff --git a/src/timeline/services/timeline-candidates.service.spec.ts b/src/timeline/services/timeline-candidates.service.spec.ts new file mode 100644 index 00000000..9da66ef4 --- /dev/null +++ b/src/timeline/services/timeline-candidates.service.spec.ts @@ -0,0 +1,331 @@ +import { Test, TestingModule } from '@nestjs/testing'; +import { ICandidateTweet, TimelineCandidatesService } from './timeline-candidates.service'; +import { getRepositoryToken } from '@nestjs/typeorm'; +import { ConfigService } from '@nestjs/config'; +import { Repository } from 'typeorm'; +import { UserInterests } from 'src/user/entities/user-interests.entity'; +import { TweetCategory } from 'src/tweets/entities/tweet-category.entity'; +import { Tweet } from 'src/tweets/entities/tweet.entity'; +import { Category } from 'src/category/entities/category.entity'; +import { InitTimelineQueueJobService } from 'src/background-jobs/timeline/timeline.service'; + +describe('TimelineCandidatesService', () => { + let service: TimelineCandidatesService; + let user_interests_repository: jest.Mocked>; + let tweet_category_repository: jest.Mocked>; + let tweet_repository: jest.Mocked>; + let category_repository: jest.Mocked>; + let config_service: jest.Mocked; + let init_timeline_queue_job_service: jest.Mocked; + + const mock_user_id = 'user-123'; + const mock_user_interests = [ + { user_id: mock_user_id, category_id: '1', score: 10 }, + { user_id: mock_user_id, category_id: '2', score: 5 }, + ] as unknown as UserInterests[]; + + const mock_candidate_tweets: ICandidateTweet[] = [ + { tweet_id: 'tweet-1', created_at: new Date(), category_id: 1, score: 10 }, + { tweet_id: 'tweet-2', created_at: new Date(), category_id: 1, score: 8 }, + { tweet_id: 'tweet-3', created_at: new Date(), category_id: 2, score: 5 }, + ]; + + const create_mock_query_builder = () => { + const qb: any = { + innerJoin: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + andWhere: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getRawMany: jest.fn().mockResolvedValue([]), + }; + return qb; + }; + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + TimelineCandidatesService, + { + provide: getRepositoryToken(UserInterests), + useValue: { + find: jest.fn(), + create: jest.fn(), + save: jest.fn(), + }, + }, + { + provide: getRepositoryToken(TweetCategory), + useValue: { + createQueryBuilder: jest.fn(), + }, + }, + { + provide: getRepositoryToken(Tweet), + useValue: { + createQueryBuilder: jest.fn(), + }, + }, + { + provide: getRepositoryToken(Category), + useValue: { + createQueryBuilder: jest.fn(() => ({ + orderBy: jest.fn().mockReturnThis(), + limit: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([]), + })), + }, + }, + { + provide: InitTimelineQueueJobService, + useValue: { + queueInitTimelineQueue: jest.fn().mockResolvedValue(undefined), + }, + }, + { + provide: ConfigService, + useValue: { + get: jest.fn((key, default_value) => { + if (key === 'TIMELINE_TWEET_FRESHNESS_DAYS') return 7; + return default_value; + }), + }, + }, + ], + }).compile(); + + service = module.get(TimelineCandidatesService); + user_interests_repository = module.get(getRepositoryToken(UserInterests)); + tweet_category_repository = module.get(getRepositoryToken(TweetCategory)); + tweet_repository = module.get(getRepositoryToken(Tweet)); + category_repository = module.get(getRepositoryToken(Category)); + init_timeline_queue_job_service = module.get(InitTimelineQueueJobService); + config_service = module.get(ConfigService); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should be defined', () => { + expect(service).toBeDefined(); + }); + + describe('getCandidates', () => { + it('should return candidates based on user interests', async () => { + user_interests_repository.find.mockResolvedValue(mock_user_interests); + + const qb = create_mock_query_builder(); + // Return enough tweets to avoid fallback + const many_tweets = Array.from({ length: 10 }, (_, i) => ({ + tweet_id: `tweet-${i}`, + created_at: new Date(), + category_id: 1, + percentage: 100 - i * 5, + })); + qb.getRawMany.mockResolvedValue(many_tweets); + tweet_category_repository.createQueryBuilder.mockReturnValue(qb); + + const result = await service.getCandidates(mock_user_id, new Set(), 10); + + expect(user_interests_repository.find).toHaveBeenCalledWith({ + where: { user_id: mock_user_id }, + order: { score: 'DESC' }, + }); + expect(result.length).toBeGreaterThan(0); + }); + + it('should exclude specified tweet IDs', async () => { + user_interests_repository.find.mockResolvedValue(mock_user_interests); + + const qb = create_mock_query_builder(); + // Return enough tweets to avoid fallback + const many_tweets = Array.from({ length: 12 }, (_, i) => ({ + tweet_id: i === 5 ? 'tweet-excluded' : `tweet-${i}`, + created_at: new Date(), + category_id: 1, + percentage: 100 - i * 5, + })); + qb.getRawMany.mockResolvedValue(many_tweets); + tweet_category_repository.createQueryBuilder.mockReturnValue(qb); + + const excluded = new Set(['tweet-excluded']); + const result = await service.getCandidates(mock_user_id, excluded, 10); + + expect(result.every((c) => c.tweet_id !== 'tweet-excluded')).toBe(true); + }); + + it('should use random fallback when user has no interests', async () => { + user_interests_repository.find.mockResolvedValue([]); + + const qb = create_mock_query_builder(); + qb.getRawMany.mockResolvedValue([ + { tweet_id: 'random-1', created_at: new Date() }, + { tweet_id: 'random-2', created_at: new Date() }, + ]); + tweet_repository.createQueryBuilder.mockReturnValue(qb); + + const result = await service.getCandidates(mock_user_id, new Set(), 10); + + expect(tweet_repository.createQueryBuilder).toHaveBeenCalled(); + expect(result.length).toBeGreaterThanOrEqual(0); + }); + + it('should limit results to requested count', async () => { + user_interests_repository.find.mockResolvedValue(mock_user_interests); + + const qb = create_mock_query_builder(); + const many_tweets = Array.from({ length: 50 }, (_, i) => ({ + tweet_id: `tweet-${i}`, + created_at: new Date(), + category_id: 1, + percentage: 100, + })); + qb.getRawMany.mockResolvedValue(many_tweets); + tweet_category_repository.createQueryBuilder.mockReturnValue(qb); + + const limit = 20; + const result = await service.getCandidates(mock_user_id, new Set(), limit); + + expect(result.length).toBeLessThanOrEqual(limit); + }); + + it('should sort candidates by score', async () => { + user_interests_repository.find.mockResolvedValue(mock_user_interests); + + const qb = create_mock_query_builder(); + const many_tweets = Array.from({ length: 10 }, (_, i) => ({ + tweet_id: `tweet-${i}`, + created_at: new Date(), + category_id: 1, + percentage: 50 + (i % 2) * 50, // Mix of 50 and 100 percentages + })); + qb.getRawMany.mockResolvedValue(many_tweets); + tweet_category_repository.createQueryBuilder.mockReturnValue(qb); + + const result = await service.getCandidates(mock_user_id, new Set(), 10); + + // Verify results are sorted by score descending + for (let i = 1; i < result.length; i++) { + expect(result[i - 1].score).toBeGreaterThanOrEqual(result[i].score); + } + }); + + it('should use fallback when not enough candidates found', async () => { + user_interests_repository.find.mockResolvedValue(mock_user_interests); + + const qb_category = create_mock_query_builder(); + qb_category.getRawMany.mockResolvedValue([ + { + tweet_id: 'tweet-1', + created_at: new Date(), + category_id: 1, + percentage: 100, + }, + ]); + tweet_category_repository.createQueryBuilder.mockReturnValue(qb_category); + + const qb_fallback = create_mock_query_builder(); + qb_fallback.getRawMany.mockResolvedValue([ + { tweet_id: 'fallback-1', created_at: new Date() }, + { tweet_id: 'fallback-2', created_at: new Date() }, + ]); + tweet_repository.createQueryBuilder.mockReturnValue(qb_fallback); + + const result = await service.getCandidates(mock_user_id, new Set(), 10); + + // Should have attempted to get fallback tweets + expect(tweet_repository.createQueryBuilder).toHaveBeenCalled(); + }); + + it('should calculate score based on interest and percentage', async () => { + user_interests_repository.find.mockResolvedValue([ + { user_id: mock_user_id, category_id: '1', score: 10 } as unknown as UserInterests, + ]); + + const qb = create_mock_query_builder(); + const many_tweets = Array.from({ length: 10 }, (_, i) => ({ + tweet_id: `tweet-${i}`, + created_at: new Date(), + category_id: 1, + percentage: 50, + })); + qb.getRawMany.mockResolvedValue(many_tweets); + tweet_category_repository.createQueryBuilder.mockReturnValue(qb); + + const result = await service.getCandidates(mock_user_id, new Set(), 10); + + if (result.length > 0) { + // Score should be interest_score * (percentage / 100) = 10 * 0.5 = 5 + expect(result[0].score).toBe(5); + } + }); + + it('should exclude user own tweets', async () => { + user_interests_repository.find.mockResolvedValue(mock_user_interests); + + const qb = create_mock_query_builder(); + const many_tweets = Array.from({ length: 10 }, (_, i) => ({ + tweet_id: `tweet-${i}`, + created_at: new Date(), + category_id: 1, + percentage: 100, + })); + qb.getRawMany.mockResolvedValue(many_tweets); + tweet_category_repository.createQueryBuilder.mockReturnValue(qb); + + await service.getCandidates(mock_user_id, new Set(), 10); + + // Verify the query builder excluded user's own tweets + expect(qb.andWhere).toHaveBeenCalledWith('tweet.user_id != :user_id', { + user_id: mock_user_id, + }); + }); + + it('should exclude blocked users tweets', async () => { + user_interests_repository.find.mockResolvedValue(mock_user_interests); + + const qb = create_mock_query_builder(); + const many_tweets = Array.from({ length: 10 }, (_, i) => ({ + tweet_id: `tweet-${i}`, + created_at: new Date(), + category_id: 1, + percentage: 100, + })); + qb.getRawMany.mockResolvedValue(many_tweets); + tweet_category_repository.createQueryBuilder.mockReturnValue(qb); + + await service.getCandidates(mock_user_id, new Set(), 10); + + // Verify blocked users are excluded + expect(qb.andWhere).toHaveBeenCalledWith( + expect.stringContaining('user_blocks'), + expect.any(Object) + ); + }); + + it('should exclude muted users tweets', async () => { + user_interests_repository.find.mockResolvedValue(mock_user_interests); + + const qb = create_mock_query_builder(); + const many_tweets = Array.from({ length: 10 }, (_, i) => ({ + tweet_id: `tweet-${i}`, + created_at: new Date(), + category_id: 1, + percentage: 100, + })); + qb.getRawMany.mockResolvedValue(many_tweets); + tweet_category_repository.createQueryBuilder.mockReturnValue(qb); + + await service.getCandidates(mock_user_id, new Set(), 10); + + // Verify muted users are excluded + expect(qb.andWhere).toHaveBeenCalledWith( + expect.stringContaining('user_mutes'), + expect.any(Object) + ); + }); + }); +}); diff --git a/src/timeline/services/timeline-candidates.service.ts b/src/timeline/services/timeline-candidates.service.ts new file mode 100644 index 00000000..4e9a6d3c --- /dev/null +++ b/src/timeline/services/timeline-candidates.service.ts @@ -0,0 +1,331 @@ +import { Injectable } from '@nestjs/common'; +import { InjectRepository } from '@nestjs/typeorm'; +import { ConfigService } from '@nestjs/config'; +import { Repository } from 'typeorm'; +import { UserInterests } from 'src/user/entities/user-interests.entity'; +import { TweetCategory } from 'src/tweets/entities/tweet-category.entity'; +import { Tweet } from 'src/tweets/entities/tweet.entity'; +import { Category } from 'src/category/entities/category.entity'; +import { InitTimelineQueueJobService } from 'src/background-jobs/timeline/timeline.service'; +import { JOB_DELAYS } from 'src/background-jobs/constants/queue.constants'; + +export interface ICandidateTweet { + tweet_id: string; + created_at: Date; + category_id: number; + score: number; +} + +@Injectable() +export class TimelineCandidatesService { + private readonly tweet_freshness_days: number; + LIMIT_FACTOR: number; + + constructor( + @InjectRepository(UserInterests) + private readonly user_interests_repository: Repository, + @InjectRepository(TweetCategory) + private readonly tweet_category_repository: Repository, + @InjectRepository(Tweet) + private readonly tweet_repository: Repository, + @InjectRepository(Category) + private readonly category_repository: Repository, + private readonly config_service: ConfigService, + private readonly init_timeline_queue_job_service: InitTimelineQueueJobService + ) { + this.tweet_freshness_days = this.config_service.get( + 'TIMELINE_TWEET_FRESHNESS_DAYS', + 7 + ); + + this.LIMIT_FACTOR = 500; + } + + async getCandidates( + user_id: string, + excluded_tweet_ids: Set, + limit: number + ): Promise { + const user_interests = await this.user_interests_repository.find({ + where: { user_id }, + order: { score: 'DESC' }, + }); + + if (user_interests.length === 0) { + console.log(`[Candidates] No interests found, assigning 3 random interests`); + // No interests means that the user makes a refresh before inserting their interests + // Assign 3 random interests and trigger the init timeline queue job + await this.assignRandomInterests(user_id); + await this.init_timeline_queue_job_service.queueInitTimelineQueue({ user_id }); + // for now, return random tweets while the background job processes + return this.getRandomFreshTweets(user_id, excluded_tweet_ids, limit); + } + + // Get freshness cutoff date + const cutoff_date = new Date(); + cutoff_date.setDate(cutoff_date.getDate() - this.tweet_freshness_days); + + // Calculate total score and percentage for each interest + const total_score = user_interests.reduce((sum, interest) => sum + interest.score, 0); + const candidates: ICandidateTweet[] = []; + + // Get tweets for each interest category based on score percentage + for (const interest of user_interests) { + const score_percentage = interest.score / total_score; + const tweets_for_this_category = Math.ceil(limit * score_percentage); + + const category_tweets = await this.getTweetsForCategory( + user_id, + interest.category_id as any, + cutoff_date, + excluded_tweet_ids, + tweets_for_this_category, + interest.score + ); + + candidates.push(...category_tweets); + + if (candidates.length >= limit) { + break; + } + } + + // If we don't have enough candidates, try fallback + if (candidates.length < limit) { + const additional_needed = limit - candidates.length; + console.log( + `[Candidates] Only found ${candidates.length}/${limit} tweets, fetching ${additional_needed} from fallback` + ); + const fallback_tweets = await this.getFallbackTweets( + user_id, + excluded_tweet_ids, + additional_needed, + new Set(user_interests.map((i) => i.category_id as any)) + ); + console.log(`[Candidates] Fallback provided ${fallback_tweets.length} tweets`); + candidates.push(...fallback_tweets); + } + + const final_candidates = candidates.sort((a, b) => b.score - a.score).slice(0, limit); + return final_candidates; + } + + private async getTweetsForCategory( + user_id: string, + category_id: number, + cutoff_date: Date, + excluded_tweet_ids: Set, + limit: number, + interest_score: number + ): Promise { + const query = this.tweet_category_repository + .createQueryBuilder('tc') + .innerJoin('tc.tweet', 'tweet') + .innerJoin('tweet.user', 'user') + .where('tc.category_id = :category_id', { category_id }) + // .andWhere('tweet.created_at >= :cutoff_date', { cutoff_date }) + .andWhere('tweet.deleted_at IS NULL') + .andWhere('user.deleted_at IS NULL') + // Exclude blocked users + .andWhere( + `tweet.user_id NOT IN ( + SELECT blocked_id FROM user_blocks WHERE blocker_id = :user_id + )`, + { user_id } + ) + // Exclude muted users + .andWhere( + `tweet.user_id NOT IN ( + SELECT muted_id FROM user_mutes WHERE muter_id = :user_id + )`, + { user_id } + ) + // Exclude user's own tweets + .andWhere('tweet.user_id != :user_id', { user_id }) + .select([ + 'tweet.tweet_id AS tweet_id', + 'tweet.created_at AS created_at', + 'tc.category_id AS category_id', + 'tc.percentage AS percentage', + ]) + .orderBy('tweet.created_at', 'DESC'); + // commented out till we test performance + // .limit(limit * this.LIMIT_FACTOR); // Get more to filter out seen ones + + const results = await query.getRawMany(); + + const candidates: ICandidateTweet[] = []; + for (const result of results) { + if (excluded_tweet_ids.has(result.tweet_id)) { + continue; + } + + const score = interest_score * (result.percentage / 100); + candidates.push({ + tweet_id: result.tweet_id, + created_at: result.created_at, + category_id: result.category_id, + score, + }); + + if (candidates.length >= limit) { + break; + } + } + + return candidates; + } + + private async getFallbackTweets( + user_id: string, + excluded_tweet_ids: Set, + limit: number, + user_category_ids: Set + ): Promise { + const cutoff_date = new Date(); + cutoff_date.setDate(cutoff_date.getDate() - this.tweet_freshness_days); + + const query = this.tweet_repository + .createQueryBuilder('tweet') + .innerJoin('tweet.user', 'user') + .where('tweet.created_at >= :cutoff_date', { cutoff_date }) + .andWhere('tweet.deleted_at IS NULL') + .andWhere('user.deleted_at IS NULL') + .andWhere( + `tweet.user_id NOT IN ( + SELECT blocked_id FROM user_blocks WHERE blocker_id = :user_id + )`, + { user_id } + ) + .andWhere( + `tweet.user_id NOT IN ( + SELECT muted_id FROM user_mutes WHERE muter_id = :user_id + )`, + { user_id } + ) + .andWhere('tweet.user_id != :user_id', { user_id }) + .select([ + 'tweet.tweet_id AS tweet_id', + 'tweet.created_at AS created_at', + 'tweet.num_likes AS num_likes', + 'tweet.num_views AS num_views', + ]) + .orderBy('tweet.num_likes', 'DESC') + .addOrderBy('tweet.num_views', 'DESC') + .addOrderBy('tweet.created_at', 'DESC') + .limit(limit * this.LIMIT_FACTOR); + + const results = await query.getRawMany(); + + const candidates: ICandidateTweet[] = []; + for (const result of results) { + if (excluded_tweet_ids.has(result.tweet_id)) { + continue; + } + + // Score based on engagement + const score = result.num_likes * 2 + result.num_views * 0.1; + + candidates.push({ + tweet_id: result.tweet_id, + created_at: result.created_at, + category_id: 0, // No specific category + score, + }); + + if (candidates.length >= limit) { + break; + } + } + + return candidates; + } + + private async getRandomFreshTweets( + user_id: string, + excluded_tweet_ids: Set, + limit: number + ): Promise { + const cutoff_date = new Date(); + cutoff_date.setDate(cutoff_date.getDate() - this.tweet_freshness_days); + + const query = this.tweet_repository + .createQueryBuilder('tweet') + .innerJoin('tweet.user', 'user') + .where('tweet.created_at >= :cutoff_date', { cutoff_date }) + .andWhere('tweet.deleted_at IS NULL') + .andWhere('user.deleted_at IS NULL') + .andWhere( + `tweet.user_id NOT IN ( + SELECT blocked_id FROM user_blocks WHERE blocker_id = :user_id + )`, + { user_id } + ) + .andWhere( + `tweet.user_id NOT IN ( + SELECT muted_id FROM user_mutes WHERE muter_id = :user_id + )`, + { user_id } + ) + .andWhere('tweet.user_id != :user_id', { user_id }) + .select([ + 'tweet.tweet_id AS tweet_id', + 'tweet.created_at AS created_at', + 'tweet.num_likes AS num_likes', + ]) + .orderBy('RANDOM()') + .limit(limit * this.LIMIT_FACTOR); + + const results = await query.getRawMany(); + + const candidates: ICandidateTweet[] = []; + for (const result of results) { + if (excluded_tweet_ids.has(result.tweet_id)) { + continue; + } + + candidates.push({ + tweet_id: result.tweet_id, + created_at: result.created_at, + category_id: 0, + score: result.num_likes || 0, + }); + + if (candidates.length >= limit) { + break; + } + } + + return candidates; + } + + private async assignRandomInterests(user_id: string): Promise { + try { + const random_categories = await this.category_repository + .createQueryBuilder('category') + .orderBy('RANDOM()') + .limit(3) + .getMany(); + + if (random_categories.length === 0) { + console.error(`[Candidates] No categories available to assign`); + return; + } + + const user_interests = random_categories.map((category) => + this.user_interests_repository.create({ + user_id, + category_id: String(category.id), + score: 100, + }) + ); + + await this.user_interests_repository.save(user_interests); + } catch (error) { + console.error( + `[Candidates] Error assigning random interests to user ${user_id}:`, + error + ); + } + } +} diff --git a/src/timeline/services/timeline-redis.service.spec.ts b/src/timeline/services/timeline-redis.service.spec.ts new file mode 100644 index 00000000..93b3011c --- /dev/null +++ b/src/timeline/services/timeline-redis.service.spec.ts @@ -0,0 +1,284 @@ +import { Test, TestingModule } from '@nestjs/testing'; +import { ITimelineTweetData, TimelineRedisService } from './timeline-redis.service'; +import type Redis from 'ioredis'; + +describe('TimelineRedisService', () => { + let service: TimelineRedisService; + let redis_client: jest.Mocked; + + const mock_user_id = 'user-123'; + const mock_tweets: ITimelineTweetData[] = [ + { tweet_id: 'tweet-1', created_at: '2024-01-01T00:00:00.000Z' }, + { tweet_id: 'tweet-2', created_at: '2024-01-02T00:00:00.000Z' }, + { tweet_id: 'tweet-3', created_at: '2024-01-03T00:00:00.000Z' }, + ]; + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + TimelineRedisService, + { + provide: 'default_IORedisModuleConnectionToken', + useValue: { + pipeline: jest.fn(), + rpush: jest.fn(), + lrange: jest.fn(), + llen: jest.fn(), + lrem: jest.fn(), + del: jest.fn(), + ltrim: jest.fn().mockResolvedValue('OK'), + }, + }, + ], + }).compile(); + + service = module.get(TimelineRedisService); + redis_client = module.get('default_IORedisModuleConnectionToken'); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should be defined', () => { + expect(service).toBeDefined(); + }); + + describe('addToQueue', () => { + it('should add tweets to queue', async () => { + const mock_pipeline = { + rpush: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([ + [null, 1], + [null, 2], + [null, 3], + ]), + }; + redis_client.pipeline.mockReturnValue(mock_pipeline as any); + + const result = await service.addToQueue(mock_user_id, mock_tweets); + + expect(redis_client.pipeline).toHaveBeenCalled(); + expect(mock_pipeline.rpush).toHaveBeenCalledTimes(3); + expect(result).toBe(3); + }); + + it('should return 0 when no tweets provided', async () => { + const result = await service.addToQueue(mock_user_id, []); + + expect(result).toBe(0); + expect(redis_client.pipeline).not.toHaveBeenCalled(); + }); + + it('should handle pipeline errors', async () => { + const mock_pipeline = { + rpush: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue(null), + }; + redis_client.pipeline.mockReturnValue(mock_pipeline as any); + + const result = await service.addToQueue(mock_user_id, mock_tweets); + + expect(result).toBe(0); + }); + }); + + describe('getFromQueue', () => { + it('should fetch tweets from queue', async () => { + const serialized = mock_tweets.map((t) => JSON.stringify(t)); + redis_client.lrange.mockResolvedValue(serialized); + + const result = await service.getFromQueue(mock_user_id, 0, 3); + + expect(redis_client.lrange).toHaveBeenCalledWith('timeline:foryou:user-123', 0, 2); + expect(result).toEqual(mock_tweets); + }); + + it('should return empty array when no tweets found', async () => { + redis_client.lrange.mockResolvedValue([]); + + const result = await service.getFromQueue(mock_user_id, 0, 10); + + expect(result).toEqual([]); + }); + + it('should handle pagination correctly', async () => { + const serialized = [JSON.stringify(mock_tweets[1])]; + redis_client.lrange.mockResolvedValue(serialized); + + const result = await service.getFromQueue(mock_user_id, 1, 1); + + expect(redis_client.lrange).toHaveBeenCalledWith('timeline:foryou:user-123', 1, 1); + expect(result).toEqual([mock_tweets[1]]); + }); + }); + + describe('getQueueSize', () => { + it('should return queue size', async () => { + redis_client.llen.mockResolvedValue(100); + + const result = await service.getQueueSize(mock_user_id); + + expect(redis_client.llen).toHaveBeenCalledWith('timeline:foryou:user-123'); + expect(result).toBe(100); + }); + + it('should return 0 for empty queue', async () => { + redis_client.llen.mockResolvedValue(0); + + const result = await service.getQueueSize(mock_user_id); + + expect(result).toBe(0); + }); + }); + + describe('isTweetInQueue', () => { + it('should return true when tweet exists', async () => { + const serialized = mock_tweets.map((t) => JSON.stringify(t)); + redis_client.llen.mockResolvedValue(3); + redis_client.lrange.mockResolvedValue(serialized); + + const result = await service.isTweetInQueue(mock_user_id, 'tweet-2'); + + expect(result).toBe(true); + }); + + it('should return false when tweet does not exist', async () => { + const serialized = mock_tweets.map((t) => JSON.stringify(t)); + redis_client.llen.mockResolvedValue(3); + redis_client.lrange.mockResolvedValue(serialized); + + const result = await service.isTweetInQueue(mock_user_id, 'tweet-999'); + + expect(result).toBe(false); + }); + + it('should return false for empty queue', async () => { + redis_client.llen.mockResolvedValue(0); + redis_client.lrange.mockResolvedValue([]); + + const result = await service.isTweetInQueue(mock_user_id, 'tweet-1'); + + expect(result).toBe(false); + }); + }); + + describe('getTweetIdsInQueue', () => { + it('should return all tweet IDs in queue', async () => { + const serialized = mock_tweets.map((t) => JSON.stringify(t)); + redis_client.llen.mockResolvedValue(3); + redis_client.lrange.mockResolvedValue(serialized); + + const result = await service.getTweetIdsInQueue(mock_user_id); + + expect(result.size).toBe(3); + expect(result.has('tweet-1')).toBe(true); + expect(result.has('tweet-2')).toBe(true); + expect(result.has('tweet-3')).toBe(true); + }); + + it('should return empty set for empty queue', async () => { + redis_client.llen.mockResolvedValue(0); + redis_client.lrange.mockResolvedValue([]); + + const result = await service.getTweetIdsInQueue(mock_user_id); + + expect(result.size).toBe(0); + }); + }); + + describe('removeOldTweets', () => { + it('should remove tweets older than cutoff date', async () => { + const all_tweets = [ + { tweet_id: 'tweet-1', created_at: '2024-01-01T00:00:00.000Z' }, // old + { tweet_id: 'tweet-2', created_at: '2024-01-10T00:00:00.000Z' }, // new + { tweet_id: 'tweet-3', created_at: '2024-01-02T00:00:00.000Z' }, // old + ]; + const serialized = all_tweets.map((t) => JSON.stringify(t)); + redis_client.llen.mockResolvedValue(3); + redis_client.lrange.mockResolvedValue(serialized); + + const mock_pipeline = { + lrem: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + redis_client.pipeline.mockReturnValue(mock_pipeline as any); + + const cutoff_timestamp = '2024-01-05T00:00:00.000Z'; + const result = await service.removeOldTweets(mock_user_id, cutoff_timestamp); + + expect(mock_pipeline.lrem).toHaveBeenCalledTimes(2); + expect(mock_pipeline.exec).toHaveBeenCalledTimes(1); + expect(result).toBe(2); + }); + + it('should return 0 when no old tweets found', async () => { + const serialized = mock_tweets.map((t) => JSON.stringify(t)); + redis_client.llen.mockResolvedValue(3); + redis_client.lrange.mockResolvedValue(serialized); + + const mock_pipeline = { + lrem: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + redis_client.pipeline.mockReturnValue(mock_pipeline as any); + + const cutoff_timestamp = '2023-01-01T00:00:00.000Z'; + const result = await service.removeOldTweets(mock_user_id, cutoff_timestamp); + + expect(mock_pipeline.lrem).not.toHaveBeenCalled(); + expect(mock_pipeline.exec).not.toHaveBeenCalled(); + expect(result).toBe(0); + }); + }); + + describe('initializeQueue', () => { + it('should clear and initialize queue with new tweets', async () => { + const mock_pipeline = { + rpush: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([ + [null, 1], + [null, 2], + [null, 3], + ]), + }; + redis_client.del.mockResolvedValue(1); + redis_client.pipeline.mockReturnValue(mock_pipeline as any); + + const result = await service.initializeQueue(mock_user_id, mock_tweets); + + expect(redis_client.del).toHaveBeenCalledWith('timeline:foryou:user-123'); + expect(result).toBe(3); + }); + + it('should return 0 for empty tweet array', async () => { + redis_client.del.mockResolvedValue(1); + + const result = await service.initializeQueue(mock_user_id, []); + + expect(redis_client.del).toHaveBeenCalled(); + expect(result).toBe(0); + }); + }); + + describe('trimQueue', () => { + it('should trim queue to max size', async () => { + redis_client.llen.mockResolvedValue(7000); + redis_client.ltrim.mockResolvedValue('OK'); + + const result = await service.trimQueue(mock_user_id, 5000); + + expect(redis_client.ltrim).toHaveBeenCalledWith('timeline:foryou:user-123', 2000, -1); + expect(result).toBe(2000); + }); + + it('should return 0 when queue is smaller than max', async () => { + redis_client.llen.mockResolvedValue(100); + + const result = await service.trimQueue(mock_user_id, 200); + + expect(redis_client.ltrim).not.toHaveBeenCalled(); + expect(result).toBe(0); + }); + }); +}); diff --git a/src/timeline/services/timeline-redis.service.ts b/src/timeline/services/timeline-redis.service.ts new file mode 100644 index 00000000..e36e2129 --- /dev/null +++ b/src/timeline/services/timeline-redis.service.ts @@ -0,0 +1,194 @@ +import { InjectRedis } from '@nestjs-modules/ioredis'; +import { Injectable } from '@nestjs/common'; +import Redis from 'ioredis'; + +export interface ITimelineTweetData { + tweet_id: string; + created_at: string; // ISO timestamp +} + +@Injectable() +export class TimelineRedisService { + constructor(@InjectRedis() private readonly redis_client: Redis) {} + + private getQueueKey(user_id: string): string { + return `timeline:foryou:${user_id}`; + } + + /** + * Add tweets to the user's timeline queue + * @param user_id User ID + * @param tweets Array of tweet data to add + * @returns Number of items added + */ + async addToQueue(user_id: string, tweets: ITimelineTweetData[]): Promise { + if (tweets.length === 0) return 0; + + const key = this.getQueueKey(user_id); + const pipeline = this.redis_client.pipeline(); + + tweets.forEach((tweet) => { + const value = JSON.stringify(tweet); + pipeline.rpush(key, value); + }); + + const results = await pipeline.exec(); + if (!results) { + return 0; + } + + // Return the length after last push + const last_result = results[results.length - 1]; + const final_count = last_result && !last_result[0] ? (last_result[1] as number) : 0; + return final_count; + } + + /** + * Get tweets from the queue starting from a specific position + * @param user_id User ID + * @param start Start index (0-based) + * @param count Number of tweets to fetch + * @returns Array of tweet data + */ + async getFromQueue( + user_id: string, + start: number, + count: number + ): Promise { + const key = this.getQueueKey(user_id); + const end = start + count - 1; + + const items = await this.redis_client.lrange(key, start, end); + + return items.map((item) => JSON.parse(item) as ITimelineTweetData); + } + + /** + * Get the current size of the queue + * @param user_id User ID + * @returns Queue size + */ + async getQueueSize(user_id: string): Promise { + const key = this.getQueueKey(user_id); + return this.redis_client.llen(key); + } + + /** + * Check if a tweet exists in the queue + * @param user_id User ID + * @param tweet_id Tweet ID to check + * @returns True if tweet exists in queue + */ + async isTweetInQueue(user_id: string, tweet_id: string): Promise { + const key = this.getQueueKey(user_id); + const size = await this.redis_client.llen(key); + + // Fetch all items and check (for small queues this is acceptable) + const items = await this.redis_client.lrange(key, 0, size - 1); + + for (const item of items) { + const tweet: ITimelineTweetData = JSON.parse(item); + if (tweet.tweet_id === tweet_id) { + return true; + } + } + + return false; + } + + /** + * Remove tweets older than specified timestamp from the queue + * @param user_id User ID + * @param before_timestamp ISO timestamp - remove tweets created before this + * @returns Number of tweets removed + */ + async removeOldTweets(user_id: string, before_timestamp: string): Promise { + const key = this.getQueueKey(user_id); + const size = await this.redis_client.llen(key); + + if (size === 0) return 0; + + const items = await this.redis_client.lrange(key, 0, size - 1); + const pipeline = this.redis_client.pipeline(); + + let removed_count = 0; + + for (const item of items) { + const tweet: ITimelineTweetData = JSON.parse(item); + if (tweet.created_at < before_timestamp) { + pipeline.lrem(key, 1, item); + removed_count++; + } + } + + if (removed_count > 0) { + await pipeline.exec(); + } + + return removed_count; + } + + /** + * Clear the entire queue for a user + * @param user_id User ID + */ + async clearQueue(user_id: string): Promise { + const key = this.getQueueKey(user_id); + await this.redis_client.del(key); + } + + /** + * Initialize queue with tweets (replaces existing queue) + * @param user_id User ID + * @param tweets Array of tweet data + * @returns Queue size after initialization + */ + async initializeQueue(user_id: string, tweets: ITimelineTweetData[]): Promise { + await this.clearQueue(user_id); + return this.addToQueue(user_id, tweets); + } + + /** + * Get all tweet IDs in the queue (for checking duplicates) + * @param user_id User ID + * @returns Set of tweet IDs + */ + async getTweetIdsInQueue(user_id: string): Promise> { + const key = this.getQueueKey(user_id); + const size = await this.redis_client.llen(key); + const items = await this.redis_client.lrange(key, 0, size - 1); + + const tweet_ids = new Set(); + for (const item of items) { + const tweet: ITimelineTweetData = JSON.parse(item); + tweet_ids.add(tweet.tweet_id); + } + + return tweet_ids; + } + + /** + * Trim the queue to a maximum size by removing oldest entries (first added) + * @param user_id User ID + * @param max_size Maximum queue size to maintain + * @returns Number of items removed + * @example If queue has 7000 items and max_size is 5000, removes the first 2000 oldest tweets + */ + async trimQueue(user_id: string, max_size: number): Promise { + const key = this.getQueueKey(user_id); + const current_size = await this.redis_client.llen(key); + + if (current_size <= max_size) { + return 0; + } + + const to_remove = current_size - max_size; + + // Remove the oldest tweets (indices 0 to to_remove-1) + // LTRIM keeps items from 'to_remove' to end, effectively removing the first 'to_remove' items + // Example: size=7000, max=5000 → removes indices 0-1999, keeps indices 2000-6999 (newest 5000) + await this.redis_client.ltrim(key, to_remove, -1); + + return to_remove; + } +} diff --git a/src/timeline/timeline.controller.ts b/src/timeline/timeline.controller.ts index 33fcd5b1..f77eddbe 100644 --- a/src/timeline/timeline.controller.ts +++ b/src/timeline/timeline.controller.ts @@ -1,19 +1,9 @@ import { Controller, Get, Query, UseGuards } from '@nestjs/common'; import { TimelineService } from './timeline.service'; -import { - ApiBearerAuth, - ApiBody, - ApiOkResponse, - ApiOperation, - ApiQuery, - ApiTags, -} from '@nestjs/swagger'; +import { ApiBearerAuth, ApiOkResponse, ApiOperation, ApiQuery, ApiTags } from '@nestjs/swagger'; import { JwtAuthGuard } from 'src/auth/guards/jwt.guard'; import { TimelinePaginationDto } from './dto/timeline-pagination.dto'; -import { MentionsDto } from './dto/mentions.dto'; -import { TrendsDto } from './dto/trends.dto'; import { GetUserId } from 'src/decorators/get-userId.decorator'; -import { TimelineResponseDto } from './dto/timeline-response.dto'; import { ERROR_MESSAGES, SUCCESS_MESSAGES } from 'src/constants/swagger-messages'; import { ApiBadRequestErrorResponse, @@ -57,6 +47,7 @@ export class TimelineController { pagination.limit ); } + @ApiImplementationStatus({ status: ImplementationStatus.IMPLEMENTED, summary: timeline_swagger.following.operation.summary, @@ -76,21 +67,4 @@ export class TimelineController { ) { return await this.timeline_service.getFollowingTimeline(user_id, pagination); } - - @ApiOperation(timeline_swagger.mentions.operation) - @ApiOkResponse(timeline_swagger.responses.mentions_success) - @ApiUnauthorizedErrorResponse(ERROR_MESSAGES.INVALID_OR_EXPIRED_TOKEN) - @ApiBadRequestErrorResponse(ERROR_MESSAGES.INVALID_PAGINATION_PARAMETERS) - @ResponseMessage(SUCCESS_MESSAGES.MENTIONS_RETRIEVED) - @Get('/mentions') - async getMentions(@GetUserId() user_id: string, @Query() mentions: MentionsDto) {} - - @ApiOperation(timeline_swagger.trends.operation) - @ApiQuery(timeline_swagger.api_query.category) - @ApiOkResponse(timeline_swagger.responses.trends_success) - @ApiUnauthorizedErrorResponse(ERROR_MESSAGES.INVALID_OR_EXPIRED_TOKEN) - @ApiBadRequestErrorResponse(ERROR_MESSAGES.INVALID_CATEGORY_PARAMETER) - @ResponseMessage(SUCCESS_MESSAGES.TRENDS_RETRIEVED) - @Get('/trends') - async getTrends(@Query() trends: TrendsDto) {} } diff --git a/src/timeline/timeline.module.ts b/src/timeline/timeline.module.ts index 013e06f3..65ce742d 100644 --- a/src/timeline/timeline.module.ts +++ b/src/timeline/timeline.module.ts @@ -1,4 +1,4 @@ -import { Module } from '@nestjs/common'; +import { forwardRef, Module } from '@nestjs/common'; import { TimelineService } from './timeline.service'; import { TimelineController } from './timeline.controller'; import { TweetsRepository } from 'src/tweets/tweets.repository'; @@ -10,6 +10,12 @@ import { TweetCategory } from 'src/tweets/entities/tweet-category.entity'; import { ForyouService } from './services/foryou/for-you.service'; import { InterestsCandidateSource } from './services/foryou/canditate-sources/interests-source'; import { UserInterests } from 'src/user/entities/user-interests.entity'; +import { UserTimelineCursor } from 'src/user/entities/user-timeline-cursor.entity'; +import { TimelineRedisService } from './services/timeline-redis.service'; +import { TimelineCandidatesService } from './services/timeline-candidates.service'; +import { BackgroundJobsModule } from 'src/background-jobs/background-jobs.module'; +import { RedisModuleConfig } from 'src/redis/redis.module'; +import { Category } from 'src/category/entities'; @Module({ imports: [ @@ -20,7 +26,11 @@ import { UserInterests } from 'src/user/entities/user-interests.entity'; UserPostsView, TweetCategory, UserInterests, + UserTimelineCursor, + Category, ]), + forwardRef(() => BackgroundJobsModule), + RedisModuleConfig, ], controllers: [TimelineController], providers: [ @@ -29,6 +39,9 @@ import { UserInterests } from 'src/user/entities/user-interests.entity'; PaginationService, ForyouService, InterestsCandidateSource, + TimelineRedisService, + TimelineCandidatesService, ], + exports: [TimelineRedisService, TimelineCandidatesService], }) export class TimelineModule {} diff --git a/src/timeline/timeline.service.spec.ts b/src/timeline/timeline.service.spec.ts index 8cf9df8c..7b9a60f3 100644 --- a/src/timeline/timeline.service.spec.ts +++ b/src/timeline/timeline.service.spec.ts @@ -260,153 +260,4 @@ describe('TimelineService', () => { expect(Array.isArray(result.data)).toBe(true); }); }); - - describe('getForyouTimeline', () => { - it('should call tweet repository with correct parameters', async () => { - await service.getForyouTimeline(mock_user_id, mock_pagination); - - expect(tweet_repository.getForyouTweets).toHaveBeenCalledWith( - mock_user_id, - mock_pagination.cursor, - mock_pagination.limit - ); - expect(tweet_repository.getForyouTweets).toHaveBeenCalledTimes(1); - }); - - it('should return timeline response from repository', async () => { - const result = await service.getForyouTimeline(mock_user_id, mock_pagination); - - expect(result).toEqual(mock_timeline_response); - expect(result.data).toEqual(mock_timeline_response.data); - expect(result.pagination).toEqual(mock_timeline_response.pagination); - }); - - it('should extract cursor from pagination object', async () => { - const custom_pagination: TimelinePaginationDto = { - cursor: 'custom-cursor', - limit: 30, - since_id: undefined, - }; - - await service.getForyouTimeline(mock_user_id, custom_pagination); - - expect(tweet_repository.getForyouTweets).toHaveBeenCalledWith( - mock_user_id, - 'custom-cursor', - 30 - ); - }); - - it('should extract limit from pagination object', async () => { - const custom_pagination: TimelinePaginationDto = { - cursor: 'cursor-abc', - limit: 50, - since_id: undefined, - }; - - await service.getForyouTimeline(mock_user_id, custom_pagination); - - expect(tweet_repository.getForyouTweets).toHaveBeenCalledWith( - mock_user_id, - 'cursor-abc', - 50 - ); - }); - - it('should handle undefined cursor', async () => { - const pagination_without_cursor: TimelinePaginationDto = { - cursor: undefined, - limit: 20, - since_id: undefined, - }; - - await service.getForyouTimeline(mock_user_id, pagination_without_cursor); - - expect(tweet_repository.getForyouTweets).toHaveBeenCalledWith( - mock_user_id, - undefined, - 20 - ); - }); - - it('should handle undefined limit', async () => { - const pagination_without_limit: TimelinePaginationDto = { - cursor: 'cursor-abc', - limit: undefined, - since_id: undefined, - }; - - await service.getForyouTimeline(mock_user_id, pagination_without_limit); - - expect(tweet_repository.getForyouTweets).toHaveBeenCalledWith( - mock_user_id, - 'cursor-abc', - undefined - ); - }); - - it('should return empty data when no tweets available', async () => { - const empty_response = { - data: [], - pagination: { - next_cursor: null, - has_more: false, - }, - }; - tweet_repository.getForyouTweets.mockResolvedValue(empty_response); - - const result = await service.getForyouTimeline(mock_user_id, mock_pagination); - - expect(result.data).toEqual([]); - expect(result.pagination.has_more).toBe(false); - expect(result.pagination.next_cursor).toBeNull(); - }); - - it('should handle multiple tweets in response', async () => { - const multiple_tweets_response = { - data: [ - mock_tweet_response, - { ...mock_tweet_response, tweet_id: 'tweet-2' }, - { ...mock_tweet_response, tweet_id: 'tweet-3' }, - ], - pagination: { - next_cursor: 'next-cursor-789', - has_more: true, - }, - }; - tweet_repository.getForyouTweets.mockResolvedValue(multiple_tweets_response); - - const result = await service.getForyouTimeline(mock_user_id, mock_pagination); - - expect(result.data.length).toBe(3); - expect(result.data[0].tweet_id).toBe('tweet-1'); - expect(result.data[1].tweet_id).toBe('tweet-2'); - expect(result.data[2].tweet_id).toBe('tweet-3'); - }); - - it('should propagate errors from repository', async () => { - const error = new Error('Repository error'); - tweet_repository.getForyouTweets.mockRejectedValue(error); - - await expect(service.getForyouTimeline(mock_user_id, mock_pagination)).rejects.toThrow( - 'Repository error' - ); - }); - - it('should handle pagination with has_more false', async () => { - const response_with_no_more = { - data: [mock_tweet_response], - pagination: { - next_cursor: null, - has_more: false, - }, - }; - tweet_repository.getForyouTweets.mockResolvedValue(response_with_no_more); - - const result = await service.getForyouTimeline(mock_user_id, mock_pagination); - - expect(result.pagination.next_cursor).toBeNull(); - expect(result.pagination.has_more).toBe(false); - }); - }); }); diff --git a/src/timeline/timeline.service.ts b/src/timeline/timeline.service.ts index 37b626cf..837e5d4a 100644 --- a/src/timeline/timeline.service.ts +++ b/src/timeline/timeline.service.ts @@ -1,7 +1,6 @@ import { Injectable } from '@nestjs/common'; import { TimelinePaginationDto } from './dto/timeline-pagination.dto'; import { TweetsRepository } from 'src/tweets/tweets.repository'; -import { TimelineResponseDto } from './dto/timeline-response.dto'; import { TweetResponseDTO } from 'src/tweets/dto/tweet-response.dto'; @Injectable() @@ -20,17 +19,17 @@ export class TimelineService { pagination.limit ); } - async getForyouTimeline( - user_id: string, - pagination: TimelinePaginationDto - ): Promise<{ - data: TweetResponseDTO[]; - pagination: { next_cursor: string | null; has_more: boolean }; - }> { - return await this.tweet_repository.getForyouTweets( - user_id, - pagination.cursor, - pagination.limit - ); - } + // async getForyouTimeline( + // user_id: string, + // pagination: TimelinePaginationDto + // ): Promise<{ + // data: TweetResponseDTO[]; + // pagination: { next_cursor: string | null; has_more: boolean }; + // }> { + // return await this.tweet_repository.getForyouTweets( + // user_id, + // pagination.cursor, + // pagination.limit + // ); + // } } diff --git a/src/trend/dto/trends.dto.ts b/src/trend/dto/trends.dto.ts index d5d0217f..05f16a19 100644 --- a/src/trend/dto/trends.dto.ts +++ b/src/trend/dto/trends.dto.ts @@ -1,6 +1,6 @@ import { ApiProperty } from '@nestjs/swagger'; import { Type } from 'class-transformer'; -import { IsInt, IsNotEmpty, IsOptional, IsString, Max, MaxLength, Min } from 'class-validator'; +import { IsInt, IsOptional, IsString, Max, MaxLength, Min } from 'class-validator'; import { STRING_MAX_LENGTH } from 'src/constants/variables'; export class TrendsDto { diff --git a/src/trend/fake-trend.service.spec.ts b/src/trend/fake-trend.service.spec.ts new file mode 100644 index 00000000..523b8475 --- /dev/null +++ b/src/trend/fake-trend.service.spec.ts @@ -0,0 +1,630 @@ +import { Test, TestingModule } from '@nestjs/testing'; +import { FakeTrendService } from './fake-trend.service'; +import { getRepositoryToken } from '@nestjs/typeorm'; +import { DataSource, Repository } from 'typeorm'; +import { User } from 'src/user/entities/user.entity'; +import { TweetsService } from 'src/tweets/tweets.service'; +import { TrendDataConstants } from 'src/constants/variables'; +import * as bcrypt from 'bcrypt'; +import { TrendService } from './trend.service'; +import { Hashtag } from 'src/tweets/entities/hashtags.entity'; +import { Tweet } from 'src/tweets/entities'; +import { TweetHashtag } from 'src/tweets/entities/tweet-hashtag.entity'; + +jest.mock('bcrypt'); + +describe('FakeTrendService', () => { + let fake_trend_service: FakeTrendService; + let user_repo: Repository; + let tweets_service: TweetsService; + let trend_service: TrendService; + let hashtag_repo: Repository; + let tweet_hashtag_repo: Repository; + let data_source: DataSource; + + const mock_repo = (): Record => ({ + create: jest.fn(), + save: jest.fn(), + findOne: jest.fn(), + find: jest.fn(), + delete: jest.fn(), + update: jest.fn(), + preload: jest.fn(), + insert: jest.fn(), + increment: jest.fn(), + decrement: jest.fn(), + createQueryBuilder: jest.fn(), + }); + + const mock_user = { + id: 'trend-bot-id-123', + email: 'trend@yapper.test', + name: 'Trend Bot', + username: 'trendbot_', + password: 'hashed_password', + birth_date: new Date('2004-09-22'), + language: 'en' as const, + avatar_url: '', + created_at: new Date(), + updated_at: new Date(), + deleted_at: null, + }; + + beforeEach(async () => { + const mock_user_repo = mock_repo(); + const mock_tweets_service = { + createFakeTrendTweet: jest.fn().mockResolvedValue({}), + buildDefaultHashtagTopics: jest.fn().mockReturnValue({}), + deleteTweetsByUserId: jest.fn().mockResolvedValue(undefined), + }; + const mock_trend_service = {}; + const mock_hashtag_repo = mock_repo(); + const mock_tweet_hashtag_repo = mock_repo(); + const mock_data_source = {}; + + const module: TestingModule = await Test.createTestingModule({ + providers: [ + FakeTrendService, + { provide: getRepositoryToken(User), useValue: mock_user_repo }, + { provide: TweetsService, useValue: mock_tweets_service }, + { provide: TrendService, useValue: { mock_trend_service } }, + { provide: getRepositoryToken(Hashtag), useValue: mock_repo() }, + { provide: getRepositoryToken(TweetHashtag), useValue: mock_repo() }, + { provide: DataSource, useValue: mock_data_source }, + ], + }).compile(); + + fake_trend_service = module.get(FakeTrendService); + user_repo = mock_user_repo as unknown as Repository; + tweets_service = module.get(TweetsService); + trend_service = module.get(TrendService); + hashtag_repo = module.get>(getRepositoryToken(Hashtag)); + tweet_hashtag_repo = module.get>(getRepositoryToken(TweetHashtag)); + data_source = module.get(DataSource); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should be defined', () => { + expect(fake_trend_service).toBeDefined(); + expect(user_repo).toBeDefined(); + expect(tweets_service).toBeDefined(); + expect(trend_service).toBeDefined(); + expect(hashtag_repo).toBeDefined(); + expect(tweet_hashtag_repo).toBeDefined(); + }); + + describe('insertTrendBotIfNotExists', () => { + it('should return existing trend bot if it exists', async () => { + // Arrange + jest.spyOn(user_repo, 'findOne').mockResolvedValue(mock_user as any); + + // Act + const result = await (fake_trend_service as any).insertTrendBotIfNotExists(); + + // Assert + expect(result).toEqual(mock_user); + expect(user_repo.findOne).toHaveBeenCalledWith({ + where: { email: TrendDataConstants.TREND_BOT.email }, + }); + }); + + it('should create new trend bot if it does not exist', async () => { + // Arrange + const hashed_password = 'hashed_password_123'; + jest.spyOn(user_repo, 'findOne').mockResolvedValue(null); + (bcrypt.hash as jest.Mock).mockResolvedValue(hashed_password); + jest.spyOn(user_repo, 'create').mockReturnValue(mock_user as any); + jest.spyOn(user_repo, 'save').mockResolvedValue(mock_user as any); + + // Act + const result = await (fake_trend_service as any).insertTrendBotIfNotExists(); + + // Assert + expect(result).toEqual(mock_user); + expect(bcrypt.hash).toHaveBeenCalledWith(TrendDataConstants.TREND_BOT.password, 10); + expect(user_repo.create).toHaveBeenCalledWith({ + ...TrendDataConstants.TREND_BOT, + password: hashed_password, + }); + expect(user_repo.save).toHaveBeenCalledWith(mock_user); + }); + + it('should handle bcrypt hash error', async () => { + // Arrange + jest.spyOn(user_repo, 'findOne').mockResolvedValue(null); + const hash_error = new Error('Hash failed'); + (bcrypt.hash as jest.Mock).mockRejectedValue(hash_error); + + // Act & Assert + await expect((fake_trend_service as any).insertTrendBotIfNotExists()).rejects.toThrow( + hash_error + ); + }); + + it('should handle user save error', async () => { + // Arrange + jest.spyOn(user_repo, 'findOne').mockResolvedValue(null); + (bcrypt.hash as jest.Mock).mockResolvedValue('hashed_password'); + jest.spyOn(user_repo, 'create').mockReturnValue(mock_user as any); + const save_error = new Error('Save failed'); + jest.spyOn(user_repo, 'save').mockRejectedValue(save_error); + + // Act & Assert + await expect((fake_trend_service as any).insertTrendBotIfNotExists()).rejects.toThrow( + save_error + ); + }); + }); + + describe('createFakeTrendTweets', () => { + it('should create fake trend tweets successfully', async () => { + // Arrange + const trend_bot_id = 'trend-bot-id-123'; + const mock_hashtags = ['#sports', '#football', '#soccer']; + const mock_topics = { sports: { Sports: 100, Entertainment: 0, News: 0 } }; + + jest.spyOn(fake_trend_service as any, 'selectRandomHashtags').mockReturnValue({ + hashtags: mock_hashtags, + category: 'Sports', + }); + jest.spyOn(fake_trend_service as any, 'getRandomHashtagSelection').mockReturnValue({ + hashtags: ['#sports', '#football'], + category: 'Sports', + }); + jest.spyOn(fake_trend_service as any, 'buildTweetContent').mockReturnValue( + 'Check out these trending topics! #sports #football' + ); + jest.spyOn(tweets_service, 'buildDefaultHashtagTopics').mockReturnValue( + mock_topics as any + ); + jest.spyOn(tweets_service, 'createFakeTrendTweet').mockResolvedValue(undefined as any); + + // Act + await (fake_trend_service as any).createFakeTrendTweets(trend_bot_id); + + // Assert + expect(tweets_service.createFakeTrendTweet).toHaveBeenCalled(); + expect(tweets_service.buildDefaultHashtagTopics).toHaveBeenCalled(); + }); + + it('should continue creating tweets even if one fails', async () => { + // Arrange + const trend_bot_id = 'trend-bot-id-123'; + jest.spyOn(fake_trend_service as any, 'selectRandomHashtags').mockReturnValue({ + hashtags: ['#sports'], + category: 'Sports', + }); + jest.spyOn(fake_trend_service as any, 'getRandomHashtagSelection').mockReturnValue({ + hashtags: ['#sports'], + category: 'Sports', + }); + jest.spyOn(fake_trend_service as any, 'buildTweetContent').mockReturnValue( + 'Test content #sports' + ); + jest.spyOn(tweets_service, 'buildDefaultHashtagTopics').mockReturnValue({} as any); + + // First call fails, second succeeds + jest.spyOn(tweets_service, 'createFakeTrendTweet') + .mockRejectedValueOnce(new Error('Tweet creation failed')) + .mockResolvedValueOnce(undefined as any); + + // Act & Assert - should not throw + await expect( + (fake_trend_service as any).createFakeTrendTweets(trend_bot_id) + ).resolves.not.toThrow(); + }); + + it('should handle selectRandomHashtags error gracefully', async () => { + // Arrange + const trend_bot_id = 'trend-bot-id-123'; + const select_error = new Error('Selection failed'); + jest.spyOn(fake_trend_service as any, 'selectRandomHashtags').mockImplementation(() => { + throw select_error; + }); + + // Act & Assert - should not throw but log error + await expect( + (fake_trend_service as any).createFakeTrendTweets(trend_bot_id) + ).resolves.not.toThrow(); + }); + }); + + describe('selectRandomHashtags', () => { + it('should select hashtags from all three categories', () => { + // Act + const result = (fake_trend_service as any).selectRandomHashtags(); + + // Assert + expect(result.hashtags).toBeDefined(); + expect(Array.isArray(result.hashtags)).toBe(true); + expect(result.hashtags.length).toBeGreaterThan(0); + expect(result.hashtags.length).toBeLessThanOrEqual(100); + expect(result.category).toBeDefined(); + }); + + it('should include hashtags from sports category', () => { + // Act + const result = (fake_trend_service as any).selectRandomHashtags(); + + // Assert + const sports_count = result.hashtags.filter( + (tag: string) => + TrendDataConstants.SPORTS_TRENDS.includes(tag) || + TrendDataConstants.SPORTS_TRENDS.includes(tag.toLowerCase()) + ).length; + expect(sports_count).toBeGreaterThan(0); + }); + + it('should include hashtags from entertainment category', () => { + // Act + const result = (fake_trend_service as any).selectRandomHashtags(); + + // Assert + const entertainment_count = result.hashtags.filter( + (tag: string) => + TrendDataConstants.ENTERTAINMENT_TRENDS.includes(tag) || + TrendDataConstants.ENTERTAINMENT_TRENDS.includes(tag.toLowerCase()) + ).length; + expect(entertainment_count).toBeGreaterThan(0); + }); + + it('should include hashtags from news category', () => { + // Act + const result = (fake_trend_service as any).selectRandomHashtags(); + + // Assert + const news_count = result.hashtags.filter( + (tag: string) => + TrendDataConstants.NEWS_TRENDS.includes(tag) || + TrendDataConstants.NEWS_TRENDS.includes(tag.toLowerCase()) + ).length; + expect(news_count).toBeGreaterThan(0); + }); + }); + + describe('getRandomHashtagSelection', () => { + it('should select hashtags for Sports category', () => { + // Arrange + const all_hashtags = { + hashtags: ['#football', '#soccer', '#basketball'], + category: 'Sports' as const, + }; + + // Act + const result = (fake_trend_service as any).getRandomHashtagSelection(all_hashtags); + + // Assert + expect(result.hashtags).toBeDefined(); + expect(Array.isArray(result.hashtags)).toBe(true); + // Category is randomly selected from the 3 possible categories + expect(['Sports', 'Entertainment', 'News']).toContain(result.category); + // Should have up to 5 hashtags + expect(result.hashtags.length).toBeLessThanOrEqual(5); + }); + + it('should select hashtags for Entertainment category', () => { + // Arrange + const all_hashtags = { + hashtags: ['#movie', '#music', '#celebrity'], + category: 'Entertainment' as const, + }; + + // Act + const result = (fake_trend_service as any).getRandomHashtagSelection(all_hashtags); + + // Assert + expect(result.hashtags).toBeDefined(); + // Category is randomly selected, not necessarily Entertainment + expect(['Sports', 'Entertainment', 'News']).toContain(result.category); + expect(Array.isArray(result.hashtags)).toBe(true); + }); + + it('should select hashtags for News category', () => { + // Arrange + const all_hashtags = { + hashtags: ['#breaking', '#update', '#news'], + category: 'News' as const, + }; + + // Act + const result = (fake_trend_service as any).getRandomHashtagSelection(all_hashtags); + + // Assert + expect(result.hashtags).toBeDefined(); + // Category is randomly selected, not necessarily News + expect(['Sports', 'Entertainment', 'News']).toContain(result.category); + }); + + it('should select up to 5 hashtags per tweet', () => { + // Arrange + const all_hashtags = { + hashtags: Array.from({ length: 100 }, (_, i) => `#hashtag${i}`), + category: 'Sports' as const, + }; + + // Act + const result = (fake_trend_service as any).getRandomHashtagSelection(all_hashtags); + + // Assert + expect(result.hashtags.length).toBeLessThanOrEqual(5); + expect(result.hashtags.length).toBeGreaterThan(0); + }); + }); + + describe('getHashtagsByCategory', () => { + it('should return Sports hashtags for Sports category', () => { + // Act + const result = (fake_trend_service as any).getHashtagsByCategory('Sports'); + + // Assert + expect(result).toEqual(TrendDataConstants.SPORTS_TRENDS); + }); + + it('should return Entertainment hashtags for Entertainment category', () => { + // Act + const result = (fake_trend_service as any).getHashtagsByCategory('Entertainment'); + + // Assert + expect(result).toEqual(TrendDataConstants.ENTERTAINMENT_TRENDS); + }); + + it('should return News hashtags for News category', () => { + // Act + const result = (fake_trend_service as any).getHashtagsByCategory('News'); + + // Assert + expect(result).toEqual(TrendDataConstants.NEWS_TRENDS); + }); + + it('should return Sports hashtags for unknown category', () => { + // Act + const result = (fake_trend_service as any).getHashtagsByCategory('Unknown' as any); + + // Assert + expect(result).toEqual(TrendDataConstants.SPORTS_TRENDS); + }); + }); + + describe('getRandomItems', () => { + it('should return requested count of items', () => { + // Arrange + const items = ['item1', 'item2', 'item3', 'item4', 'item5']; + const count = 3; + + // Act + const result = (fake_trend_service as any).getRandomItems(items, count); + + // Assert + expect(result.length).toBeLessThanOrEqual(count); + expect(result.length).toBeGreaterThan(0); + }); + + it('should return all items if count exceeds array length', () => { + // Arrange + const items = ['item1', 'item2']; + const count = 10; + + // Act + const result = (fake_trend_service as any).getRandomItems(items, count); + + // Assert + expect(result.length).toBeLessThanOrEqual(items.length); + }); + + it('should return empty array for empty input', () => { + // Arrange + const items: string[] = []; + const count = 5; + + // Act + const result = (fake_trend_service as any).getRandomItems(items, count); + + // Assert + expect(result.length).toBe(0); + }); + + it('should not modify original array', () => { + // Arrange + const items = ['item1', 'item2', 'item3']; + const items_copy = [...items]; + + // Act + (fake_trend_service as any).getRandomItems(items, 2); + + // Assert + expect(items).toEqual(items_copy); + }); + }); + + describe('buildTweetContent', () => { + it('should build tweet content with hashtags', () => { + // Arrange + const hashtags = ['#sports', '#football', '#soccer']; + + // Act + const result = (fake_trend_service as any).buildTweetContent(hashtags); + + // Assert + expect(result).toBeDefined(); + expect(typeof result).toBe('string'); + hashtags.forEach((hashtag) => { + expect(result).toContain(hashtag); + }); + }); + + it('should use different templates', () => { + // Arrange + const hashtags = ['#sports', '#football']; + const templates = new Set(); + + // Act - Generate multiple templates + for (let i = 0; i < 20; i++) { + const content = (fake_trend_service as any).buildTweetContent(hashtags); + templates.add(content); + } + + // Assert - Should have variety (accounting for randomness) + expect(templates.size).toBeGreaterThan(1); + }); + + it('should include all provided hashtags', () => { + // Arrange + const hashtags = ['#test1', '#test2', '#test3']; + + // Act + const result = (fake_trend_service as any).buildTweetContent(hashtags); + + // Assert + hashtags.forEach((hashtag) => { + expect(result).toContain(hashtag); + }); + }); + + it('should produce valid tweet content', () => { + // Arrange + const hashtags = ['#sports']; + + // Act + const result = (fake_trend_service as any).buildTweetContent(hashtags); + + // Assert + expect(result).toBeTruthy(); + expect(result.length).toBeGreaterThan(0); + }); + }); + + describe('deleteFakeTrends', () => { + it('should delete fake trends for trend bot', async () => { + // Arrange + (user_repo.findOne as jest.Mock).mockResolvedValue(mock_user as any); + (tweets_service.deleteTweetsByUserId as jest.Mock).mockResolvedValue(undefined); + + // Act + await fake_trend_service.deleteFakeTrends(); + + // Assert + expect(user_repo.findOne).toHaveBeenCalledWith({ + where: { email: TrendDataConstants.TREND_BOT.email }, + }); + expect(tweets_service.deleteTweetsByUserId).toHaveBeenCalledWith(mock_user.id); + }); + + it('should handle when trend bot does not exist', async () => { + // Arrange + (user_repo.findOne as jest.Mock).mockResolvedValue(null); + + // Act & Assert - should not throw + await expect(fake_trend_service.deleteFakeTrends()).resolves.not.toThrow(); + expect(tweets_service.deleteTweetsByUserId).not.toHaveBeenCalled(); + }); + + it('should handle deleteTweetsByUserId error', async () => { + // Arrange + (user_repo.findOne as jest.Mock).mockResolvedValue(mock_user as any); + const delete_error = new Error('Delete failed'); + (tweets_service.deleteTweetsByUserId as jest.Mock).mockRejectedValue(delete_error); + + // Act & Assert - should not throw but log error + await expect(fake_trend_service.deleteFakeTrends()).resolves.not.toThrow(); + }); + + it('should handle findOne error gracefully', async () => { + // Arrange + const find_error = new Error('Find failed'); + (user_repo.findOne as jest.Mock).mockRejectedValue(find_error); + + // Act & Assert - should not throw but log error + await expect(fake_trend_service.deleteFakeTrends()).resolves.not.toThrow(); + }); + }); + + describe('fakeTrends (cron job)', () => { + it('should call insertTrendBotIfNotExists and createFakeTrendTweets', async () => { + // Arrange + jest.spyOn(fake_trend_service as any, 'insertTrendBotIfNotExists').mockResolvedValue( + mock_user + ); + jest.spyOn(fake_trend_service as any, 'createFakeTrendTweets').mockResolvedValue( + undefined + ); + + // Act + await fake_trend_service.fakeTrends(); + + // Assert + expect((fake_trend_service as any).insertTrendBotIfNotExists).toHaveBeenCalled(); + expect((fake_trend_service as any).createFakeTrendTweets).toHaveBeenCalledWith( + mock_user.id + ); + }); + + it('should handle insertTrendBotIfNotExists error', async () => { + // Arrange + const bot_error = new Error('Bot creation failed'); + jest.spyOn(fake_trend_service as any, 'insertTrendBotIfNotExists').mockRejectedValue( + bot_error + ); + + // Act & Assert - should not throw but log error + await expect(fake_trend_service.fakeTrends()).resolves.not.toThrow(); + }); + + it('should handle createFakeTrendTweets error', async () => { + // Arrange + jest.spyOn(fake_trend_service as any, 'insertTrendBotIfNotExists').mockResolvedValue( + mock_user + ); + const tweet_error = new Error('Tweet creation failed'); + jest.spyOn(fake_trend_service as any, 'createFakeTrendTweets').mockRejectedValue( + tweet_error + ); + + // Act & Assert - should not throw but log error + await expect(fake_trend_service.fakeTrends()).resolves.not.toThrow(); + }); + }); + + describe('Integration scenarios', () => { + it('should complete full fake trend creation workflow', async () => { + // Arrange + (user_repo.findOne as jest.Mock).mockResolvedValue(mock_user as any); + (tweets_service.buildDefaultHashtagTopics as jest.Mock).mockReturnValue({} as any); + (tweets_service.createFakeTrendTweet as jest.Mock).mockResolvedValue(undefined as any); + + // Act + await fake_trend_service.fakeTrends(); + + // Assert + expect(user_repo.findOne).toHaveBeenCalled(); + expect(tweets_service.buildDefaultHashtagTopics).toHaveBeenCalled(); + expect(tweets_service.createFakeTrendTweet).toHaveBeenCalled(); + }); + + it('should handle hashtag topics generation correctly', () => { + // Arrange + const mock_topics = { + sports: { Sports: 100, Entertainment: 0, News: 0 }, + }; + (tweets_service.buildDefaultHashtagTopics as jest.Mock).mockReturnValue( + mock_topics as any + ); + + const hashtags = ['#sports', '#football']; + const category = 'Sports'; + + // Act + const result = tweets_service.buildDefaultHashtagTopics(hashtags, category); + + // Assert + expect(tweets_service.buildDefaultHashtagTopics).toHaveBeenCalledWith( + hashtags, + category + ); + expect(result).toBeDefined(); + expect(result).toEqual(mock_topics); + }); + }); +}); diff --git a/src/trend/fake-trend.service.ts b/src/trend/fake-trend.service.ts new file mode 100644 index 00000000..61c25ca4 --- /dev/null +++ b/src/trend/fake-trend.service.ts @@ -0,0 +1,334 @@ +import { Injectable, Logger } from '@nestjs/common'; +import { InjectRepository } from '@nestjs/typeorm'; +import { DataSource, Repository } from 'typeorm'; +import { TweetsService } from 'src/tweets/tweets.service'; +import { User } from 'src/user/entities/user.entity'; +import { TrendDataConstants } from 'src/constants/variables'; +import * as bcrypt from 'bcrypt'; +import { Hashtag } from 'src/tweets/entities/hashtags.entity'; +import { TweetHashtag } from 'src/tweets/entities/tweet-hashtag.entity'; +import { TrendService } from './trend.service'; +import { HashtagJobDto } from 'src/background-jobs/hashtag/hashtag-job.dto'; + +interface IFakeTrendHashtags { + hashtags: string[]; + category: 'Sports' | 'Entertainment' | 'News'; +} + +@Injectable() +export class FakeTrendService { + private readonly logger = new Logger(FakeTrendService.name); + private readonly HASHTAGS_PER_CATEGORY = 33; // ~100 hashtags total from 3 categories + private readonly TWEETS_TO_CREATE = 10; // Number of fake trend tweets to create + + constructor( + private readonly tweets_service: TweetsService, + private readonly trend_service: TrendService, + + @InjectRepository(User) + private readonly user_repository: Repository, + @InjectRepository(Hashtag) + private readonly hashtag_repository: Repository, + private readonly data_source: DataSource, + @InjectRepository(TweetHashtag) + private readonly tweet_hashtags_repository: Repository + ) {} + + // Every 20 minutes + // @Cron('*/20 * * * *', { + // name: 'fake-trends-job', + // timeZone: 'UTC', + // }) + async fakeTrends(): Promise { + try { + const trend_bot = await this.insertTrendBotIfNotExists(); + await this.createFakeTrendTweets(trend_bot.id); + } catch (error) { + this.logger.error('Error in fakeTrends cron job:', error); + } + } + + async deleteFakeTrends(): Promise { + try { + const trend_bot = await this.user_repository.findOne({ + where: { email: TrendDataConstants.TREND_BOT.email }, + }); + + if (!trend_bot) { + this.logger.log('No Trend Bot found to delete tweets for.'); + return; + } + + await this.tweets_service.deleteTweetsByUserId(trend_bot.id); + this.logger.log(`Deleted fake trend tweets created by Trend Bot.`); + } catch (error) { + this.logger.error('Error deleting fake trend tweets:', error); + } + } + + private async insertTrendBotIfNotExists(): Promise { + const trend_bot_data = TrendDataConstants.TREND_BOT; + + // Check if trend bot already exists + let trend_bot = await this.user_repository.findOne({ + where: { email: trend_bot_data.email }, + }); + + if (trend_bot) { + this.logger.log('Trend Bot already exists'); + return trend_bot; + } + + // Create trend bot if it doesn't exist + const hashed_password = await bcrypt.hash(trend_bot_data.password, 10); + const new_trend_bot = this.user_repository.create({ + ...trend_bot_data, + password: hashed_password, + }); + + trend_bot = await this.user_repository.save(new_trend_bot); + this.logger.log('Trend Bot created successfully'); + + return trend_bot; + } + + private async createFakeTrendTweets(trend_bot_id: string): Promise { + try { + // Select random hashtags from each category + const selected_hashtags = this.selectRandomHashtags(); + + // Create tweets with selected hashtags + for (let i = 0; i < this.TWEETS_TO_CREATE; i++) { + const hashtag_selection = this.getRandomHashtagSelection(selected_hashtags); + const content = this.buildTweetContent(hashtag_selection.hashtags); + + // Build hashtag topics for the selected category + const hashtag_topics = this.tweets_service.buildDefaultHashtagTopics( + hashtag_selection.hashtags, + hashtag_selection.category + ); + + try { + await this.tweets_service.createFakeTrendTweet( + content, + trend_bot_id, + hashtag_topics + ); + + this.logger.log( + `Created fake trend tweet #${i + 1} with ${hashtag_selection.category} category` + ); + } catch (error) { + this.logger.warn( + `Failed to create fake trend tweet #${i + 1}:`, + (error as Error).message + ); + } + } + } catch (error) { + this.logger.error('Error creating fake trend tweets:', error); + } + } + + private selectRandomHashtags(): IFakeTrendHashtags { + const sports_trends = TrendDataConstants.SPORTS_TRENDS; + const entertainment_trends = TrendDataConstants.ENTERTAINMENT_TRENDS; + const news_trends = TrendDataConstants.NEWS_TRENDS; + + const selected: IFakeTrendHashtags = { + hashtags: [], + category: 'Sports', + }; + + // Select random hashtags from each category + selected.hashtags.push( + ...this.getRandomItems(sports_trends, this.HASHTAGS_PER_CATEGORY), + ...this.getRandomItems(entertainment_trends, this.HASHTAGS_PER_CATEGORY), + ...this.getRandomItems(news_trends, this.HASHTAGS_PER_CATEGORY) + ); + + return selected; + } + + private getRandomHashtagSelection(all_hashtags: IFakeTrendHashtags): { + hashtags: string[]; + category: 'Sports' | 'Entertainment' | 'News'; + } { + const categories: Array<'Sports' | 'Entertainment' | 'News'> = [ + 'Sports', + 'Entertainment', + 'News', + ]; + const random_category = categories[Math.floor(Math.random() * categories.length)]; + + // Select random hashtags based on category + const category_hashtags = this.getHashtagsByCategory(random_category); + const selected_hashtags = this.getRandomItems(category_hashtags, 5); // 5 hashtags per tweet + + return { + hashtags: selected_hashtags, + category: random_category, + }; + } + + private getHashtagsByCategory(category: 'Sports' | 'Entertainment' | 'News'): string[] { + switch (category) { + case 'Sports': + return TrendDataConstants.SPORTS_TRENDS; + case 'Entertainment': + return TrendDataConstants.ENTERTAINMENT_TRENDS; + case 'News': + return TrendDataConstants.NEWS_TRENDS; + default: + return TrendDataConstants.SPORTS_TRENDS; + } + } + + private getRandomItems(array: T[], count: number): T[] { + const shuffled = [...array].sort(() => Math.random() - 0.5); + return shuffled.slice(0, Math.min(count, array.length)); + } + + private buildTweetContent(hashtags: string[]): string { + const templates = [ + `Check out these trending topics! ${hashtags.join(' ')}`, + `Don't miss out on what's trending right now ${hashtags.join(' ')}`, + `The hottest trends today ${hashtags.join(' ')}`, + `Join the conversation ${hashtags.join(' ')}`, + `Stay updated with these trends ${hashtags.join(' ')}`, + `Latest trending: ${hashtags.join(' ')}`, + `What's hot in the feed ${hashtags.join(' ')}`, + `Catch the latest buzz ${hashtags.join(' ')}`, + ]; + + const random_template = templates[Math.floor(Math.random() * templates.length)]; + return random_template; + } + + async seedTrend(): Promise { + // UPDATE TWEET TIMESTAMP TO LAST 6 HOURS + await this.data_source.query(` + UPDATE tweets + SET created_at = NOW() - (random() * interval '6 hours') + `); + + console.log('Updated tweet timestamps to last 6 hours DONE'); + + await this.data_source.query(` + UPDATE tweet_hashtags + SET tweet_created_at = t.created_at + FROM tweets t + WHERE tweet_hashtags.tweet_id = t.tweet_id + `); + + console.log('Updated tweet_hashtags timestamps to match tweets DONE'); + + // SELECT TOP HASHTAGS FROM EACH CATEGORY + const sports_hashtags = await this.hashtag_repository.find({ + where: { category: 'Sports' }, + order: { usage_count: 'DESC' }, + take: 30, + }); + + const entertainment_hashtags = await this.hashtag_repository.find({ + where: { category: 'Entertainment' }, + order: { usage_count: 'DESC' }, + take: 30, + }); + + const news_hashtags = await this.hashtag_repository.find({ + where: { category: 'News' }, + order: { usage_count: 'DESC' }, + take: 20, + }); + + console.log('Fetched top hashtags from each category DONE'); + + const all_hashtags = [ + ...sports_hashtags.map((h) => ({ ...h, category: 'Sports' })), + ...entertainment_hashtags.map((h) => ({ ...h, category: 'Entertainment' })), + ...news_hashtags.map((h) => ({ ...h, category: 'News' })), + ]; + + const hashtag_names = all_hashtags.map((h) => h.name); + + const tweet_hashtag_data = await this.data_source.query( + ` + SELECT + th.hashtag_name, + th.tweet_created_at, + h.category + FROM tweet_hashtags th + JOIN hashtag h ON th.hashtag_name = h.name + WHERE th.hashtag_name = ANY($1) + ORDER BY th.tweet_created_at DESC + `, + [hashtag_names] + ); + + console.log('Fetched tweet hashtag timestamp DONE'); + + // Group by tweet timestamp + const timestamp_map = new Map>>(); + + for (const row of tweet_hashtag_data) { + const timestamp = new Date(row.tweet_created_at).getTime(); + const hashtag_name = row.hashtag_name; + const category = row.category; + + if (!timestamp_map.has(timestamp)) { + timestamp_map.set(timestamp, new Map()); + } + + const hashtag_map = timestamp_map.get(timestamp); + + if (hashtag_map) { + if (!hashtag_map.has(hashtag_name)) { + hashtag_map.set(hashtag_name, {}); + } + + const categories = hashtag_map.get(hashtag_name); + if (categories) { + categories[category] = 100; + } + } + } + + console.log(`Processing ${timestamp_map.size} unique timestamps`); + + const BATCH_SIZE = 50; + const timestamps = Array.from(timestamp_map.entries()); + + for (let i = 0; i < timestamps.length; i += BATCH_SIZE) { + const batch = timestamps.slice(i, i + BATCH_SIZE); + + console.log( + `Processing batch ${Math.floor(i / BATCH_SIZE) + 1}/${Math.ceil(timestamps.length / BATCH_SIZE)}` + ); + + await Promise.all( + batch.map(async ([timestamp, hashtag_map]) => { + const hashtags: Record> = {}; + + for (const [hashtag_name, categories] of hashtag_map.entries()) { + hashtags[hashtag_name] = categories; + } + + const job_data: HashtagJobDto = { + hashtags, + timestamp, + }; + + // Execute all three operations in parallel for each timestamp + await Promise.all([ + this.trend_service.insertCandidateHashtags(job_data), + this.trend_service.updateHashtagCounts(job_data), + this.trend_service.insertCandidateCategories(job_data), + ]); + }) + ); + } + + console.log(`Seeded trends for ${timestamp_map.size} unique timestamps DONE`); + } +} diff --git a/src/trend/trend.controller.spec.ts b/src/trend/trend.controller.spec.ts index e77920bd..a08eebb9 100644 --- a/src/trend/trend.controller.spec.ts +++ b/src/trend/trend.controller.spec.ts @@ -1,6 +1,7 @@ import { Test, TestingModule } from '@nestjs/testing'; import { TrendController } from './trend.controller'; import { TrendService } from './trend.service'; +import { FakeTrendService } from './fake-trend.service'; import { TrendsDto } from './dto/trends.dto'; describe('TrendController', () => { @@ -17,6 +18,18 @@ describe('TrendController', () => { calculateHashtagScore: jest.fn(), }; + const mock_fake_trend_service = { + fakeTrends: jest.fn(), + deleteFakeTrends: jest.fn(), + insertTrendBotIfNotExists: jest.fn(), + createFakeTrendTweets: jest.fn(), + selectRandomHashtags: jest.fn(), + getRandomHashtagSelection: jest.fn(), + getHashtagsByCategory: jest.fn(), + getRandomItems: jest.fn(), + buildTweetContent: jest.fn(), + }; + beforeEach(async () => { const module: TestingModule = await Test.createTestingModule({ controllers: [TrendController], @@ -25,6 +38,10 @@ describe('TrendController', () => { provide: TrendService, useValue: mock_trend_service, }, + { + provide: FakeTrendService, + useValue: mock_fake_trend_service, + }, ], }).compile(); @@ -43,16 +60,18 @@ describe('TrendController', () => { const mock_response = { data: [ { - hashtag: '#javascript', + text: '#javascript', posts_count: 1500, - rank: 1, + trend_rank: 1, category: 'News', + reference_id: 'javascript', }, { - hashtag: '#typescript', + text: '#typescript', posts_count: 1200, - rank: 2, + trend_rank: 2, category: 'Entertainment', + reference_id: 'typescript', }, ], }; @@ -64,7 +83,7 @@ describe('TrendController', () => { expect(service.getTrending).toHaveBeenCalledWith(undefined, undefined); expect(result).toEqual(mock_response); expect(result.data).toHaveLength(2); - expect(result.data[0].hashtag).toBe('#javascript'); + expect(result.data[0].text).toBe('#javascript'); }); it('should return trending data for specific category', async () => { @@ -72,10 +91,11 @@ describe('TrendController', () => { const mock_response = { data: [ { - hashtag: '#football', + text: '#football', posts_count: 2000, - rank: 1, + trend_rank: 1, category: 'Sports', + reference_id: 'football', }, ], }; @@ -100,14 +120,14 @@ describe('TrendController', () => { }); it('should handle custom limit', async () => { - const trends_dto: TrendsDto = { limit: 50 }; + const trends_dto: TrendsDto = { category: 'Entertainment', limit: 50 }; const mock_response = { data: [] }; mock_trend_service.getTrending.mockResolvedValue(mock_response); await controller.getTrending(trends_dto); - expect(service.getTrending).toHaveBeenCalledWith(undefined, 50); + expect(service.getTrending).toHaveBeenCalledWith('Entertainment', 50); }); it('should return empty data when no trends found', async () => { @@ -149,16 +169,18 @@ describe('TrendController', () => { const mock_response = { data: [ { - hashtag: '#nodejs', + text: '#nodejs', posts_count: 1000, - rank: 1, + trend_rank: 1, category: 'News', + reference_id: 'nodejs', }, { - hashtag: '#react', + text: '#react', posts_count: 950, - rank: 2, + trend_rank: 2, category: 'Only on Yapper', + reference_id: 'react', }, ], }; @@ -168,19 +190,38 @@ describe('TrendController', () => { const result = await controller.getTrending(); result.data.forEach((trend) => { - expect(trend).toHaveProperty('hashtag'); + expect(trend).toHaveProperty('text'); expect(trend).toHaveProperty('posts_count'); - expect(trend).toHaveProperty('rank'); + expect(trend).toHaveProperty('trend_rank'); expect(trend).toHaveProperty('category'); + expect(trend).toHaveProperty('reference_id'); }); }); it('should preserve order and ranking from service', async () => { const mock_response = { data: [ - { hashtag: '#first', posts_count: 1000, rank: 1, category: 'News' }, - { hashtag: '#second', posts_count: 900, rank: 2, category: 'News' }, - { hashtag: '#third', posts_count: 800, rank: 3, category: 'News' }, + { + text: '#first', + posts_count: 1000, + trend_rank: 1, + category: 'News', + reference_id: 'first', + }, + { + text: '#second', + posts_count: 900, + trend_rank: 2, + category: 'News', + reference_id: 'second', + }, + { + text: '#third', + posts_count: 800, + trend_rank: 3, + category: 'News', + reference_id: 'third', + }, ], }; @@ -188,9 +229,9 @@ describe('TrendController', () => { const result = await controller.getTrending(); - expect(result.data[0].rank).toBe(1); - expect(result.data[1].rank).toBe(2); - expect(result.data[2].rank).toBe(3); + expect(result.data[0].trend_rank).toBe(1); + expect(result.data[1].trend_rank).toBe(2); + expect(result.data[2].trend_rank).toBe(3); }); it('should handle service errors', async () => { @@ -201,4 +242,220 @@ describe('TrendController', () => { await expect(controller.getTrending()).rejects.toThrow('Service error'); }); }); + + describe('Edge Cases', () => { + it('should handle very large limit parameter', async () => { + const large_limit = 1000; + const mock_response = { + data: [], + }; + + mock_trend_service.getTrending.mockResolvedValue(mock_response); + + const result = await controller.getTrending({ limit: large_limit } as TrendsDto); + + expect(result).toEqual(mock_response); + expect(mock_trend_service.getTrending).toHaveBeenCalledWith(undefined, large_limit); + }); + + it('should handle zero limit parameter', async () => { + const mock_response = { + data: [], + }; + + mock_trend_service.getTrending.mockResolvedValue(mock_response); + + const result = await controller.getTrending({ limit: 0 } as TrendsDto); + + expect(result).toEqual(mock_response); + }); + + it('should handle negative limit as absolute value', async () => { + const mock_response = { + data: [], + }; + + mock_trend_service.getTrending.mockResolvedValue(mock_response); + + const result = await controller.getTrending({ limit: -10 } as TrendsDto); + + expect(result).toEqual(mock_response); + }); + + it('should handle null/undefined category gracefully', async () => { + const mock_response = { + data: [ + { + text: '#javascript', + posts_count: 1500, + trend_rank: 1, + category: 'News', + reference_id: 'javascript', + }, + ], + }; + + mock_trend_service.getTrending.mockResolvedValue(mock_response); + + const result = await controller.getTrending({ category: null, limit: 10 } as any); + + expect(result).toEqual(mock_response); + expect(mock_trend_service.getTrending).toHaveBeenCalledWith(null, 10); + }); + + it('should handle special characters in category parameter', async () => { + const mock_response = { + data: [], + }; + + mock_trend_service.getTrending.mockResolvedValue(mock_response); + + const special_category = '#$%@!'; + const result = await controller.getTrending({ + category: special_category, + limit: 10, + } as TrendsDto); + + expect(result).toEqual(mock_response); + expect(mock_trend_service.getTrending).toHaveBeenCalledWith(special_category, 10); + }); + }); + + describe('Response Validation', () => { + it('should return response with all required fields', async () => { + const mock_response = { + data: [ + { + text: '#javascript', + posts_count: 1500, + trend_rank: 1, + category: 'News', + reference_id: 'javascript', + }, + ], + }; + + mock_trend_service.getTrending.mockResolvedValue(mock_response); + + const result = await controller.getTrending(); + + expect(result).toHaveProperty('data'); + expect(Array.isArray(result.data)).toBe(true); + expect(result.data[0]).toHaveProperty('text'); + expect(result.data[0]).toHaveProperty('posts_count'); + expect(result.data[0]).toHaveProperty('trend_rank'); + expect(result.data[0]).toHaveProperty('category'); + expect(result.data[0]).toHaveProperty('reference_id'); + }); + + it('should maintain consistent data structure across multiple calls', async () => { + const mock_response = { + data: [ + { + text: '#test1', + posts_count: 100, + trend_rank: 1, + category: 'Sports', + reference_id: 'test1', + }, + { + text: '#test2', + posts_count: 90, + trend_rank: 2, + category: 'News', + reference_id: 'test2', + }, + ], + }; + + mock_trend_service.getTrending.mockResolvedValue(mock_response); + + const result1 = await controller.getTrending(); + const result2 = await controller.getTrending({ + category: 'News', + limit: 5, + } as TrendsDto); + + expect(result1.data).toHaveLength(2); + expect(result2.data).toHaveLength(2); + expect(result1.data[0]).toHaveProperty('text'); + expect(result2.data[0]).toHaveProperty('text'); + }); + + it('should return empty data array when no trends match filter', async () => { + const mock_response = { + data: [], + }; + + mock_trend_service.getTrending.mockResolvedValue(mock_response); + + const result = await controller.getTrending({ + category: 'Sports', + limit: 10, + } as TrendsDto); + + expect(result.data).toEqual([]); + expect(result.data.length).toBe(0); + }); + }); + + describe('Service Integration', () => { + it('should pass category parameter correctly to service', async () => { + const test_category = 'Entertainment'; + mock_trend_service.getTrending.mockResolvedValue({ + data: [], + }); + + await controller.getTrending({ category: test_category, limit: 10 } as TrendsDto); + + expect(mock_trend_service.getTrending).toHaveBeenCalledWith(test_category, 10); + }); + + it('should pass limit parameter correctly to service', async () => { + const test_limit = 25; + mock_trend_service.getTrending.mockResolvedValue({ + data: [], + }); + + await controller.getTrending({ category: 'News', limit: test_limit } as TrendsDto); + + expect(mock_trend_service.getTrending).toHaveBeenCalledWith('News', test_limit); + }); + + it('should handle service returning empty data', async () => { + const empty_response = { + data: [], + }; + + mock_trend_service.getTrending.mockResolvedValue(empty_response); + + const result = await controller.getTrending(); + + expect(result.data).toEqual([]); + expect(result.data.length).toBe(0); + }); + + it('should not modify service response', async () => { + const original_response = { + data: [ + { + text: '#original', + posts_count: 500, + trend_rank: 1, + category: 'News', + reference_id: 'original', + }, + ], + }; + + mock_trend_service.getTrending.mockResolvedValue(original_response); + + const result = await controller.getTrending(); + + // Verify response is returned as-is without modification + expect(result).toEqual(original_response); + expect(result.data[0].text).toBe('#original'); + expect(result.data[0].reference_id).toBe('original'); + }); + }); }); diff --git a/src/trend/trend.controller.ts b/src/trend/trend.controller.ts index 0d288549..f72c1510 100644 --- a/src/trend/trend.controller.ts +++ b/src/trend/trend.controller.ts @@ -1,14 +1,20 @@ -import { Controller, Get, Query } from '@nestjs/common'; +import { Controller, Delete, Get, Post, Query, UseGuards } from '@nestjs/common'; import { TrendService } from './trend.service'; -import { ApiOkResponse, ApiOperation, ApiQuery } from '@nestjs/swagger'; +import { ApiBearerAuth, ApiOkResponse, ApiOperation, ApiQuery } from '@nestjs/swagger'; import { SUCCESS_MESSAGES } from 'src/constants/swagger-messages'; import { ResponseMessage } from 'src/decorators/response-message.decorator'; import { trending_swagger } from 'src/explore/explore.swagger'; import { TrendsDto } from './dto/trends.dto'; - +import { JwtAuthGuard } from 'src/auth/guards/jwt.guard'; +import { FakeTrendService } from './fake-trend.service'; +@ApiBearerAuth('JWT-auth') +@UseGuards(JwtAuthGuard) @Controller('trend') export class TrendController { - constructor(private readonly trend_service: TrendService) {} + constructor( + private readonly trend_service: TrendService, + private readonly fake_trend_service: FakeTrendService + ) {} @ApiOperation(trending_swagger.operation) @ApiOkResponse(trending_swagger.responses.success) @@ -19,4 +25,24 @@ export class TrendController { async getTrending(@Query() trends_dto?: TrendsDto) { return await this.trend_service.getTrending(trends_dto?.category, trends_dto?.limit); } + + @Get('/calculate-trends') + async calculateTrends() { + return await this.trend_service.calculateTrend(); + } + + @Post('/fake-trends') + async fakeTrends() { + return await this.fake_trend_service.fakeTrends(); + } + + @Delete('/fake-trends') + async deleteFakeTrends() { + return await this.fake_trend_service.deleteFakeTrends(); + } + + @Post('/seed-trends') + async seedTrends() { + return await this.fake_trend_service.seedTrend(); + } } diff --git a/src/trend/trend.module.ts b/src/trend/trend.module.ts index 4810dd88..2793ef99 100644 --- a/src/trend/trend.module.ts +++ b/src/trend/trend.module.ts @@ -1,16 +1,23 @@ -import { Module } from '@nestjs/common'; +import { forwardRef, Module } from '@nestjs/common'; import { TrendService } from './trend.service'; import { TrendController } from './trend.controller'; import { RedisService } from 'src/redis/redis.service'; import { VelocityExponentialDetector } from './velocity-exponential-detector'; import { Hashtag } from 'src/tweets/entities/hashtags.entity'; import { TypeOrmModule } from '@nestjs/typeorm'; +import { FakeTrendService } from './fake-trend.service'; +import { User } from 'src/user/entities/user.entity'; +import { TweetsModule } from 'src/tweets/tweets.module'; +import { TweetHashtag } from 'src/tweets/entities/tweet-hashtag.entity'; @Module({ controllers: [TrendController], - imports: [TypeOrmModule.forFeature([Hashtag])], + imports: [ + TypeOrmModule.forFeature([Hashtag, User, TweetHashtag]), + forwardRef(() => TweetsModule), + ], - providers: [TrendService, RedisService, VelocityExponentialDetector], - exports: [TrendService], + providers: [TrendService, RedisService, VelocityExponentialDetector, FakeTrendService], + exports: [TrendService, FakeTrendService], }) export class TrendModule {} diff --git a/src/trend/trend.service.spec.ts b/src/trend/trend.service.spec.ts index d3c75ea8..f69e31a2 100644 --- a/src/trend/trend.service.spec.ts +++ b/src/trend/trend.service.spec.ts @@ -230,7 +230,7 @@ describe('TrendService', () => { expect.any(Number), expect.any(String) ); - expect(redis_service.expire).toHaveBeenCalledWith('candidates:active', 2 * 60 * 60); + expect(redis_service.expire).toHaveBeenCalledWith('candidates:active', 24 * 60 * 60); }); }); @@ -301,7 +301,7 @@ describe('TrendService', () => { await trend_service.updateHashtagCounts(hashtag_job); expect(redis_service.zincrby).toHaveBeenCalled(); - expect(redis_service.expire).toHaveBeenCalledWith('hashtag:#trending', 1 * 60 * 60); + expect(redis_service.expire).toHaveBeenCalledWith('hashtag:#trending', 24 * 60 * 60); expect(mock_pipeline.exec).toHaveBeenCalled(); }); }); @@ -421,4 +421,229 @@ describe('TrendService', () => { expect(mock_pipeline.exec).not.toHaveBeenCalled(); }); }); + + describe('Error Handling', () => { + it('getTrending should handle redis errors gracefully', async () => { + jest.spyOn(redis_service, 'zrevrange').mockRejectedValue( + new Error('Redis connection failed') + ); + + await expect(trend_service.getTrending()).rejects.toThrow('Redis connection failed'); + }); + + it('getHashtagCategories should handle empty hashtag list', async () => { + const hashtag_names: string[] = []; + + const mock_pipeline = { + zscore: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + + jest.spyOn(redis_service, 'pipeline').mockReturnValue(mock_pipeline as any); + + const result = await trend_service.getHashtagCategories(hashtag_names); + + expect(result).toEqual({}); + }); + + it('insertCandidateHashtags should handle empty hashtags', async () => { + const hashtag_job: HashtagJobDto = { + hashtags: {}, + timestamp: Date.now(), + }; + + jest.spyOn(redis_service, 'zadd').mockResolvedValue(0 as any); + + await expect(trend_service.insertCandidateHashtags(hashtag_job)).resolves.not.toThrow(); + }); + + it('insertCandidateCategories should handle redis errors', async () => { + const hashtag_job: HashtagJobDto = { + hashtags: { + '#test': { Sports: 50, News: 30 }, + }, + timestamp: Date.now(), + }; + + const mock_pipeline = { + zadd: jest.fn().mockReturnThis(), + expire: jest.fn().mockReturnThis(), + exec: jest.fn().mockRejectedValue(new Error('Pipeline failed')), + }; + + jest.spyOn(redis_service, 'pipeline').mockReturnValue(mock_pipeline as any); + + await expect(trend_service.insertCandidateCategories(hashtag_job)).rejects.toThrow( + 'Pipeline failed' + ); + }); + + it('updateHashtagCounts should handle database errors', async () => { + const hashtag_job: HashtagJobDto = { + hashtags: { + '#test': { Sports: 50 }, + }, + timestamp: Date.now(), + }; + + jest.spyOn(redis_service, 'zincrby').mockRejectedValue( + new Error('Redis increment failed') + ); + + await expect(trend_service.updateHashtagCounts(hashtag_job)).rejects.toThrow( + 'Redis increment failed' + ); + }); + }); + + describe('Edge Cases', () => { + it('getTrending should handle very large limit', async () => { + const large_limit = 1000; + + jest.spyOn(redis_service, 'zrevrange').mockResolvedValue([]); + jest.spyOn(hashtag_repo, 'find').mockResolvedValue([]); + jest.spyOn(trend_service as any, 'getHashtagCategories').mockResolvedValue({}); + + const result = await trend_service.getTrending(undefined, large_limit); + + expect(redis_service.zrevrange).toHaveBeenCalledWith( + 'trending:global', + 0, + large_limit - 1, + 'WITHSCORES' + ); + expect(result.data).toEqual([]); + }); + + it('getTrending should handle special characters in hashtags', async () => { + const mock_trending_data = ['Ų…ØĩØą', '100.5']; + const mock_hashtags = [{ name: 'Ų…ØĩØą', usage_count: 500 }]; + const mock_categories = { Ų…ØĩØą: 'News' }; + + jest.spyOn(redis_service, 'zrevrange').mockResolvedValue(mock_trending_data as any); + jest.spyOn(hashtag_repo, 'find').mockResolvedValue(mock_hashtags as any); + jest.spyOn(trend_service as any, 'getHashtagCategories').mockResolvedValue( + mock_categories + ); + + const result = await trend_service.getTrending(); + + expect(result.data).toHaveLength(1); + expect(result.data[0].text).toBe('#Ų…ØĩØą'); + }); + + it('insertCandidateCategories should only include categories above threshold', async () => { + const hashtag_job: HashtagJobDto = { + hashtags: { + '#test': { Sports: 25, News: 25, Entertainment: 50 }, // Only Entertainment >= 30 + }, + timestamp: Date.now(), + }; + + const mock_pipeline = { + zadd: jest.fn().mockReturnThis(), + expire: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + + jest.spyOn(redis_service, 'pipeline').mockReturnValue(mock_pipeline as any); + + await trend_service.insertCandidateCategories(hashtag_job); + + // Verify zadd was called with Entertainment category + expect(mock_pipeline.zadd).toHaveBeenCalled(); + }); + + it('getHashtagCategories should handle scores correctly', async () => { + const hashtag_names = ['#test']; + + const mock_pipeline = { + zscore: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([ + [null, '100'], // Sports: 100 + [null, '50'], // News: 50 + [null, '30'], // Entertainment: 30 + ]), + }; + + jest.spyOn(redis_service, 'pipeline').mockReturnValue(mock_pipeline as any); + + const result = await trend_service.getHashtagCategories(hashtag_names); + + // Should return the category with highest score + expect(result['#test']).toBe('Sports'); + }); + }); + + describe('Integration Scenarios', () => { + it('should process complete hashtag job workflow', async () => { + const hashtag_job: HashtagJobDto = { + hashtags: { + '#test': { Sports: 100, News: 0, Entertainment: 0 }, + }, + timestamp: Date.now(), + }; + + // Mock all redis operations + jest.spyOn(redis_service, 'zadd').mockResolvedValue(1 as any); + jest.spyOn(redis_service, 'expire').mockResolvedValue(true as any); + jest.spyOn(redis_service, 'zincrby').mockResolvedValue('1' as any); + + const mock_pipeline = { + zadd: jest.fn().mockReturnThis(), + expire: jest.fn().mockReturnThis(), + exec: jest.fn().mockResolvedValue([]), + }; + + jest.spyOn(redis_service, 'pipeline').mockReturnValue(mock_pipeline as any); + + // Execute all trend operations + await trend_service.insertCandidateHashtags(hashtag_job); + await trend_service.insertCandidateCategories(hashtag_job); + await trend_service.updateHashtagCounts(hashtag_job); + + expect(redis_service.zadd).toHaveBeenCalled(); + expect(redis_service.zincrby).toHaveBeenCalled(); + }); + + it('getTrending should return properly formatted response', async () => { + const mock_trending_data = [ + 'javascript', + '100.5', + 'typescript', + '95.3', + 'nestjs', + '89.2', + ]; + const mock_hashtags = [ + { name: 'javascript', usage_count: 1500 }, + { name: 'typescript', usage_count: 1200 }, + { name: 'nestjs', usage_count: 980 }, + ]; + const mock_categories = { + javascript: 'News', + typescript: 'Entertainment', + nestjs: 'Only on Yapper', + }; + + jest.spyOn(redis_service, 'zrevrange').mockResolvedValue(mock_trending_data as any); + jest.spyOn(hashtag_repo, 'find').mockResolvedValue(mock_hashtags as any); + jest.spyOn(trend_service as any, 'getHashtagCategories').mockResolvedValue( + mock_categories + ); + + const result = await trend_service.getTrending(); + + // Verify structure + expect(result).toHaveProperty('data'); + expect(Array.isArray(result.data)).toBe(true); + result.data.forEach((trend: any) => { + expect(trend).toHaveProperty('text'); + expect(trend).toHaveProperty('posts_count'); + expect(trend).toHaveProperty('trend_rank'); + expect(trend).toHaveProperty('category'); + expect(trend).toHaveProperty('reference_id'); + }); + }); + }); }); diff --git a/src/trend/trend.service.ts b/src/trend/trend.service.ts index b731eccd..3eba9a73 100644 --- a/src/trend/trend.service.ts +++ b/src/trend/trend.service.ts @@ -1,7 +1,7 @@ import { Injectable } from '@nestjs/common'; import { RedisService } from 'src/redis/redis.service'; import { IHashtagScore } from './hashtag-score.interface'; -import { Cron } from '@nestjs/schedule'; +import { Cron, CronExpression } from '@nestjs/schedule'; import { Hashtag } from 'src/tweets/entities/hashtags.entity'; import { InjectRepository } from '@nestjs/typeorm'; import { In, Repository } from 'typeorm'; @@ -19,20 +19,28 @@ export class TrendService { ) {} private readonly WEIGHTS = { - VOLUME: 0.35, - ACCELERATION: 0.4, - RECENCY: 0.25, + VOLUME: 0.7, // Most important + ACCELERATION: 0.25, // Growth matters + RECENCY: 0.05, // Just a small boost }; - private readonly CATEGORIES = ['Sports', 'News', 'Entertainment']; private readonly GENERAL_CATEGORY = 'Only on Yapper'; + private readonly RECENCY_MIN_SCORE = 5; + private readonly TRENDING_WINDOW_HOURS = 6; private readonly TOP_N = 30; - private readonly MIN_BUCKETS = 5 * 60 * 1000; + private readonly MIN_BUCKETS = 30 * 60 * 1000; private readonly CATEGORY_THRESHOLD = 30; + private readonly RECENCY_FULL_SCORE_MINUTES = 30; async getTrending(category?: string, limit: number = 30) { - const key = category ? `trending:${category}` : 'trending:global'; + const normalized_category = category?.trim() + ? category.trim()[0].toUpperCase() + category.trim().slice(1).toLowerCase() + : null; + + const valid_category = this.CATEGORIES.includes(normalized_category || '') || null; + + const key = valid_category ? `trending:${normalized_category}` : 'trending:global'; const trending = await this.redis_service.zrevrange(key, 0, limit - 1, 'WITHSCORES'); @@ -42,7 +50,7 @@ export class TrendService { for (let i = 0; i < trending.length; i += 2) { result.push({ hashtag: trending[i], - score: parseFloat(trending[i + 1]), + score: Number.parseFloat(trending[i + 1]), }); hashtag_names.push(trending[i]); } @@ -51,15 +59,21 @@ export class TrendService { where: { name: In(hashtag_names) }, select: ['name', 'usage_count'], }); - const hashtag_categories = await this.getHashtagCategories(hashtag_names); - console.log(hashtag_categories); - const trends: HashtagResponseDto[] = result.map((item, index) => { + const existing_hashtag_names = new Set(hashtags.map((h) => h.name)); + + // Filter out hashtags that don't exist in the database + const filtered_result = result.filter((item) => existing_hashtag_names.has(item.hashtag)); + const filtered_hashtag_names = filtered_result.map((item) => item.hashtag); + + const hashtag_categories = await this.getHashtagCategories(filtered_hashtag_names); + + const trends: HashtagResponseDto[] = filtered_result.map((item, index) => { const hashtag_data = hashtags.find((h) => h.name === item.hashtag); return { text: '#' + item.hashtag, - posts_count: hashtag_data ? hashtag_data.usage_count : 0, + posts_count: hashtag_data!.usage_count, trend_rank: index + 1, category: hashtag_categories[item.hashtag] || this.GENERAL_CATEGORY, reference_id: item.hashtag.toLowerCase(), @@ -78,14 +92,12 @@ export class TrendService { for (const hashtag of hashtag_names) { for (const category of this.CATEGORIES) { - console.log(hashtag, category); pipeline.zscore(`candidates:${category}`, hashtag); } } const results = await pipeline.exec(); const hashtag_categories: Record = {}; - console.log(results); if (!results) { // Return default categories if pipeline fails @@ -105,8 +117,9 @@ export class TrendService { for (const category of this.CATEGORIES) { const result = results[result_index]; // Check if result exists and has valid data + if (result && result[1] !== null && result[1] !== undefined) { - const score = parseFloat(result[1] as string); + const score = Number.parseFloat(result[1] as string); if (score > max_score) { max_score = score; max_category = category; @@ -128,7 +141,7 @@ export class TrendService { //Expire after 2 hours // We may delegate it to trend worker - await this.redis_service.expire('candidates:active', 2 * 60 * 60); + await this.redis_service.expire('candidates:active', 24 * 60 * 60); } async insertCandidateCategories(hashtags: HashtagJobDto) { const pipeline = this.redis_service.pipeline(); @@ -142,7 +155,7 @@ export class TrendService { if (percent >= this.CATEGORY_THRESHOLD) { // Store hashtag with its category percentage as score pipeline.zadd(`candidates:${category_name}`, percent, hashtag); - pipeline.expire(`candidates:${category_name}`, 2 * 60 * 60); + pipeline.expire(`candidates:${category_name}`, 24 * 60 * 60); } } } @@ -163,26 +176,29 @@ export class TrendService { await this.redis_service.zincrby(`hashtag:${hashtag}`, 1, time_bucket.toString()); - await this.redis_service.expire(`hashtag:${hashtag}`, 1 * 60 * 60); + await this.redis_service.expire(`hashtag:${hashtag}`, 24 * 60 * 60); } await pipeline.exec(); } - @Cron('0 * * * *') + @Cron(CronExpression.EVERY_10_HOURS, { + name: 'trend-calculation-job', + timeZone: 'UTC', + }) async calculateTrend() { try { console.log('Calculate Trend.....'); const now = Date.now(); - const one_hour_ago = now - 60 * 60 * 1000; + const hours_ago = now - this.TRENDING_WINDOW_HOURS * 60 * 60 * 1000; // 1. Get active candidates (last hour) const active_hashtags = await this.redis_service.zrangebyscore( 'candidates:active', - one_hour_ago, + hours_ago, '+inf' ); - + console.log(active_hashtags.length, ' active hashtags found'); // 2. Calculate base scores once for all hashtags const hashtag_scores: Map = new Map(); @@ -198,9 +214,7 @@ export class TrendService { global_scored.sort((a, b) => b.score - a.score); const global_top_30 = global_scored.slice(0, this.TOP_N); await this.updateTrendingList('trending:global', global_top_30); - await this.calculateCategoryTrendsFromScores(hashtag_scores, one_hour_ago); - - console.log(global_top_30); + await this.calculateCategoryTrendsFromScores(hashtag_scores, hours_ago); } catch (err) { console.log(err); throw err; @@ -209,7 +223,7 @@ export class TrendService { private async calculateCategoryTrendsFromScores( hashtag_scores: Map, - one_hour_ago: number + hours_ago: number ) { for (const category of this.CATEGORIES) { try { @@ -229,7 +243,7 @@ export class TrendService { for (let i = 0; i < category_candidates.length; i += 2) { const hashtag = category_candidates[i]; - const category_percent = parseFloat(category_candidates[i + 1]); + const category_percent = Number.parseFloat(category_candidates[i + 1]); // Use pre-calculated score const base_score_data = hashtag_scores.get(hashtag); @@ -269,20 +283,21 @@ export class TrendService { const bucket_data: Array<{ timestamp: number; count: number }> = []; for (let i = 0; i < buckets_5m.length; i += 2) { bucket_data.push({ - timestamp: parseInt(buckets_5m[i]), - count: parseFloat(buckets_5m[i + 1]), + timestamp: Number.parseInt(buckets_5m[i]), + count: Number.parseFloat(buckets_5m[i + 1]), }); } // Calculate individual scores const volume_score = this.calculateTweetVolume(bucket_data); - // const acceleration_score = this.calculateAccelerationScore(bucket_data); const acceleration_score = this.velocity_calculator.calculateFinalMomentum(bucket_data); - console.log(acceleration_score); const last_seen = await this.redis_service.zscore('candidates:active', hashtag); - const last_seen_time = last_seen ? parseInt(last_seen) : null; + const last_seen_time = last_seen ? Number.parseInt(last_seen) : null; const recency_score = this.calculateRecencyScore(last_seen_time); + console.log( + `Hashtag: ${hashtag}, Volume: ${volume_score.toFixed(2)}, Acceleration: ${acceleration_score.toFixed(2)}, Recency: ${recency_score.toFixed(2)}` + ); const final_score = this.calculateFinalScore( volume_score, @@ -351,11 +366,17 @@ export class TrendService { const minutes_ago = (Date.now() - last_seen) / (60 * 1000); - if (minutes_ago <= 1) return 100; + // Full score for recent hashtags + if (minutes_ago <= this.RECENCY_FULL_SCORE_MINUTES) return 100; + + // Linear decay over the trending window + const hours_ago = minutes_ago / 60; - const score = 100 - (minutes_ago / 60) * 100; + // Decay from 100 to RECENCY_MIN_SCORE instead of 0 + const score = + 100 - (hours_ago / this.TRENDING_WINDOW_HOURS) * (100 - this.RECENCY_MIN_SCORE); - return Math.max(0, score); + return Math.max(this.RECENCY_MIN_SCORE, Math.min(100, score)); } private calculateFinalScore(volume: number, acceleration: number, recency: number): number { diff --git a/src/trend/velocity-exponential-detector.ts b/src/trend/velocity-exponential-detector.ts index db76dcb6..380517f3 100644 --- a/src/trend/velocity-exponential-detector.ts +++ b/src/trend/velocity-exponential-detector.ts @@ -13,15 +13,11 @@ interface IVelocityAnalysis { interface IExponentialAnalysis { growth_rate: number; // 'b' in y = ae^(bx) - r_squared: number; // fit quality (0-1) - double_time: number; // minutes to double is_exponential: boolean; // fits exponential pattern? - prediction: number; // predicted next bucket } interface IMomentumResult { score: number; // 0-100 momentum score - confidence: 'LOW' | 'MEDIUM' | 'HIGH'; velocity: IVelocityAnalysis; exponential: IExponentialAnalysis; } @@ -53,10 +49,8 @@ export class VelocityExponentialDetector { // Phase 3: Combined Scoring const score = this.calculateCombinedScore(velocity_analysis, exponential_analysis); - const confidence = this.calculateConfidence(exponential_analysis.r_squared, sorted.length); return { score, - confidence, velocity: velocity_analysis, exponential: exponential_analysis, }; @@ -121,48 +115,30 @@ export class VelocityExponentialDetector { // Fit exponential curve: y = a * e^(b*x) let growth_rate = 0; - let r_squared = 0; - let prediction = 0; let exponential_result; try { exponential_result = regression.exponential(data_points); - // Extract parameters - const a = exponential_result.equation[0]; // coefficient + // // Extract parameters const b = exponential_result.equation[1]; // exponent (growth rate) growth_rate = b; - r_squared = exponential_result.r2; - - // Predict next bucket (5 minutes ahead) - const last_x = data_points[data_points.length - 1][0]; - prediction = exponential_result.predict(last_x + 5)[1]; } catch (error) { // Exponential fit failed (data might be flat or declining) // Fall back to linear const linear_result = regression.linear(data_points); - r_squared = linear_result.r2; - prediction = linear_result.predict(data_points[data_points.length - 1][0] + 5)[1]; const m = linear_result.equation[0]; // slope growth_rate = m; } - // Calculate doubling time (how long to 2x current size) - // Formula: t = ln(2) / b - const double_time = growth_rate > 0 ? Math.log(2) / growth_rate : Infinity; - // Determine if truly exponential - const is_exponential = - growth_rate >= this.EXPONENTIAL_THRESHOLD && r_squared >= this.MEDIUM_CONFIDENCE_R2; + const is_exponential = growth_rate >= this.EXPONENTIAL_THRESHOLD; return { growth_rate: Math.round(growth_rate * 10000) / 10000, - r_squared: Math.round(r_squared * 10000) / 10000, - double_time: Math.round(double_time * 100) / 100, is_exponential, - prediction: Math.round(prediction), }; } /** @@ -184,29 +160,19 @@ export class VelocityExponentialDetector { // Fit Quality Score (0-100) // R² directly translates to 0-100 - const fit_score = exponential.r_squared * 100; // Weighted combination - const final_score = velocity_score * 0.4 + exponential_score * 0.4 + fit_score * 0.2; + const final_score = velocity_score * 0.6 + exponential_score * 0.4; // Bonus: Add acceleration boost const acceleration_bonus = velocity.is_accelerating ? 10 : 0; return Math.min(100, Math.max(0, final_score + acceleration_bonus)); } - /** - * Calculate confidence based on fit quality and data points - */ - private calculateConfidence(r_squared: number, data_points: number): 'LOW' | 'MEDIUM' | 'HIGH' { - if (data_points < 3) return 'LOW'; - if (r_squared >= this.HIGH_CONFIDENCE_R2) return 'HIGH'; - if (r_squared >= this.MEDIUM_CONFIDENCE_R2) return 'MEDIUM'; - return 'LOW'; - } + private getEmptyResult(): IMomentumResult { return { score: 0, - confidence: 'LOW', velocity: { velocities: [], current_velocity: 0, @@ -216,10 +182,7 @@ export class VelocityExponentialDetector { }, exponential: { growth_rate: 0, - r_squared: 0, - double_time: Infinity, is_exponential: false, - prediction: 0, }, }; } diff --git a/src/tweets/constants.ts b/src/tweets/constants.ts index 98cce1d8..aa0a79d7 100644 --- a/src/tweets/constants.ts +++ b/src/tweets/constants.ts @@ -41,11 +41,13 @@ If the tweet is already very short or simple, produce a **more concise rewrite** If the tweet contains multiple ideas, summarize in **1–2 short sentences**. Rules: +- Begin the summary with "The tweet talks aboutâ€Ļ" (use Arabic equivalent "Ø§Ų„ØĒØēØąŲŠØ¯ØŠ ØĒØĒحدØĢ ØšŲ†â€Ļ" if the tweet is in Arabic). - Provide a summary that is **meaningfully shorter** than the original. - Do NOT repeat the original phrasing or structure. - Do NOT add any new information. - Keep the tone neutral and simple. - Remove emojis, hashtags, and usernames. +- If the tweet is in Arabic, return the summary in Arabic. For all other languages, return the summary in English. Tweet: "${content}" diff --git a/src/tweets/deleted-tweets-cleanup.service.ts b/src/tweets/deleted-tweets-cleanup.service.ts new file mode 100644 index 00000000..96c24aab --- /dev/null +++ b/src/tweets/deleted-tweets-cleanup.service.ts @@ -0,0 +1,84 @@ +import { Injectable, Logger } from '@nestjs/common'; +import { InjectRepository } from '@nestjs/typeorm'; +import { Column, CreateDateColumn, Entity, LessThan, PrimaryColumn, Repository } from 'typeorm'; +import { Cron, CronExpression } from '@nestjs/schedule'; +import { EsDeleteTweetJobService } from 'src/background-jobs/elasticsearch/es-delete-tweet.service'; +import { Hashtag } from './entities/hashtags.entity'; + +// Entity for the deleted_tweets_log table +@Entity('deleted_tweets_log') +export class DeletedTweetsLog { + @PrimaryColumn({ type: 'uuid' }) + tweet_id: string; + + @Column({ type: 'text', nullable: true }) + content: string; + + @CreateDateColumn({ type: 'timestamptz' }) + deleted_at: Date; +} + +@Injectable() +export class DeletedTweetsCleanupService { + private readonly logger = new Logger(DeletedTweetsCleanupService.name); + + constructor( + @InjectRepository(DeletedTweetsLog) + private readonly deleted_tweets_repository: Repository, + @InjectRepository(Hashtag) + private readonly hashtag_repository: Repository, + private readonly es_delete_tweet_service: EsDeleteTweetJobService + ) {} + + @Cron(CronExpression.EVERY_MINUTE) + async processDeletedTweets(): Promise { + try { + const batch_size = 100; + + const deleted_tweets = await this.deleted_tweets_repository.find({ + take: batch_size, + order: { deleted_at: 'ASC' }, + }); + + if (deleted_tweets.length === 0) { + return; + } + + this.logger.log( + `Processing ${deleted_tweets.length} deleted tweets for ES cleanup and hashtag decrement` + ); + + const tweet_ids = deleted_tweets.map((t) => t.tweet_id); + + await this.es_delete_tweet_service.queueDeleteTweet({ + tweet_ids, + }); + + await this.deleted_tweets_repository.delete(tweet_ids); + + this.logger.log(`Successfully processed ${deleted_tweets.length} deleted tweets`); + } catch (error) { + this.logger.error('Error processing deleted tweets for ES cleanup', error); + } + } + + @Cron(CronExpression.EVERY_DAY_AT_2AM) + async cleanupOldEntries(): Promise { + try { + const seven_days_ago = new Date(); + seven_days_ago.setDate(seven_days_ago.getDate() - 7); + + const result = await this.deleted_tweets_repository.delete({ + deleted_at: LessThan(seven_days_ago), + }); + + if (result.affected && result.affected > 0) { + this.logger.warn( + `Cleaned up ${result.affected} old deleted tweet log entries that were not processed` + ); + } + } catch (error) { + this.logger.error('Error cleaning up old deleted tweets log entries', error); + } + } +} diff --git a/src/tweets/dto/create-tweet.dto.ts b/src/tweets/dto/create-tweet.dto.ts index 95294d25..19011496 100644 --- a/src/tweets/dto/create-tweet.dto.ts +++ b/src/tweets/dto/create-tweet.dto.ts @@ -8,6 +8,7 @@ export class CreateTweetDTO { example: 'This is my first tweet!', maxLength: POST_CONTENT_LENGTH, }) + // @Transform(({ value }) => value.trim().replace(/@([a-zA-Z0-9_]+)/g, '')) @IsString() @MaxLength(POST_CONTENT_LENGTH) content: string; diff --git a/src/tweets/dto/get-tweet-likes-query.dto.ts b/src/tweets/dto/get-tweet-likes-query.dto.ts index 6161f2f5..2e17e64b 100644 --- a/src/tweets/dto/get-tweet-likes-query.dto.ts +++ b/src/tweets/dto/get-tweet-likes-query.dto.ts @@ -1,7 +1,6 @@ import { ApiProperty } from '@nestjs/swagger'; import { Type } from 'class-transformer'; -import { IsInt, IsOptional, IsString, Max, MaxLength, Min } from 'class-validator'; -import { STRING_MAX_LENGTH } from 'src/constants/variables'; +import { IsInt, IsOptional, Max, Min } from 'class-validator'; export class GetTweetLikesQueryDto { @ApiProperty({ diff --git a/src/tweets/dto/get-tweet-reposts-query.dto.ts b/src/tweets/dto/get-tweet-reposts-query.dto.ts index 7c60b0d9..67041a65 100644 --- a/src/tweets/dto/get-tweet-reposts-query.dto.ts +++ b/src/tweets/dto/get-tweet-reposts-query.dto.ts @@ -1,7 +1,6 @@ import { ApiProperty } from '@nestjs/swagger'; import { Type } from 'class-transformer'; -import { IsInt, IsOptional, IsString, Max, MaxLength, Min } from 'class-validator'; -import { STRING_MAX_LENGTH } from 'src/constants/variables'; +import { IsInt, IsOptional, Max, Min } from 'class-validator'; export class GetTweetRepostsQueryDto { @ApiProperty({ diff --git a/src/tweets/dto/reposted-by-user.dto.ts b/src/tweets/dto/reposted-by-user.dto.ts index 5828c814..be4b362b 100644 --- a/src/tweets/dto/reposted-by-user.dto.ts +++ b/src/tweets/dto/reposted-by-user.dto.ts @@ -19,6 +19,12 @@ export class RepostedByUserDTO { }) name: string; + @ApiProperty({ + description: 'Username', + example: 'John123', + }) + username: string; + @ApiProperty({ description: 'When the tweet was reposted (ISO 8601 timestamp)', example: '2025-10-31T12:00:00.000Z', diff --git a/src/tweets/dto/tweet-reply-response.ts b/src/tweets/dto/tweet-reply-response.ts index c7db7d5c..52e19be9 100644 --- a/src/tweets/dto/tweet-reply-response.ts +++ b/src/tweets/dto/tweet-reply-response.ts @@ -1,4 +1,4 @@ -import { Expose, Type } from 'class-transformer'; +import { Expose } from 'class-transformer'; import { TweetResponseDTO } from './tweet-response.dto'; import { ApiProperty } from '@nestjs/swagger'; diff --git a/src/tweets/dto/tweet-response.dto.ts b/src/tweets/dto/tweet-response.dto.ts index 76d95e11..a7ebfac7 100644 --- a/src/tweets/dto/tweet-response.dto.ts +++ b/src/tweets/dto/tweet-response.dto.ts @@ -3,6 +3,7 @@ import { UserResponseDTO } from './user-response.dto'; import { RepostedByUserDTO } from './reposted-by-user.dto'; import { Expose, Transform, Type } from 'class-transformer'; import { TweetType } from 'src/shared/enums/tweet-types.enum'; +import { IsOptional } from 'class-validator'; export class TweetResponseDTO { @Expose() @@ -28,6 +29,15 @@ export class TweetResponseDTO { }) type: TweetType; + @Expose() + @ApiProperty({ + description: 'Tweet type: tweet (actual)', + example: 'tweet', + enum: TweetType, + }) + @IsOptional() + post_type?: TweetType; + @Expose() @ApiProperty({ description: 'Tweet content', @@ -159,7 +169,7 @@ export class TweetResponseDTO { is_reposted?: boolean = false; @Expose() - @Transform(({ obj }) => !!obj.current_user_bookmark) + @Transform(({ obj }) => !!obj.current_user_bookmark || obj.is_bookmarked) @ApiProperty({ description: 'Whether the current user has bookmarked this tweet', example: false, @@ -175,6 +185,13 @@ export class TweetResponseDTO { }) reposted_by?: RepostedByUserDTO; + @Expose() + @ApiProperty({ + description: 'mentions array containing usernames mentioned in the tweet', + type: [String], + }) + mentions: string[]; + @Expose() @ApiProperty({ description: 'Tweet creation timestamp', diff --git a/src/tweets/dto/update-tweet-with-quote.dto.ts b/src/tweets/dto/update-tweet-with-quote.dto.ts index 7039a7f5..0a8a9281 100644 --- a/src/tweets/dto/update-tweet-with-quote.dto.ts +++ b/src/tweets/dto/update-tweet-with-quote.dto.ts @@ -1,6 +1,5 @@ -import { PartialType } from '@nestjs/swagger'; +import { ApiProperty, PartialType } from '@nestjs/swagger'; import { CreateTweetDTO } from './create-tweet.dto'; -import { ApiProperty } from '@nestjs/swagger'; import { IsOptional, IsString, MaxLength } from 'class-validator'; import { STRING_MAX_LENGTH } from 'src/constants/variables'; diff --git a/src/tweets/dto/user-response.dto.ts b/src/tweets/dto/user-response.dto.ts index e580d211..f37bc97b 100644 --- a/src/tweets/dto/user-response.dto.ts +++ b/src/tweets/dto/user-response.dto.ts @@ -1,5 +1,5 @@ import { ApiProperty } from '@nestjs/swagger'; -import { Expose, Transform } from 'class-transformer'; +import { Expose } from 'class-transformer'; export class UserResponseDTO { @Expose() diff --git a/src/tweets/entities/hashtags.entity.ts b/src/tweets/entities/hashtags.entity.ts index 4950e09d..7abf8792 100644 --- a/src/tweets/entities/hashtags.entity.ts +++ b/src/tweets/entities/hashtags.entity.ts @@ -1,13 +1,12 @@ -import { User } from '../../user/entities/user.entity'; import { Column, CreateDateColumn, DeleteDateColumn, Entity, - JoinColumn, - ManyToOne, + OneToMany, PrimaryColumn, } from 'typeorm'; +import { TweetHashtag } from './tweet-hashtag.entity'; @Entity('hashtag') export class Hashtag { @@ -17,14 +16,16 @@ export class Hashtag { @Column({ type: 'int', default: 0 }) usage_count: number; - @ManyToOne(() => User, (user) => user.hashtags, {}) - @JoinColumn({ name: 'created_by', referencedColumnName: 'id' }) - created_by: User; - @CreateDateColumn({ type: 'timestamptz' }) created_at: Date; + @Column({ type: 'varchar', nullable: true }) + category?: string; + // I guess we won't need this but just in case @DeleteDateColumn({ type: 'timestamptz' }) deleted_at: Date; + + @OneToMany(() => TweetHashtag, (tweet_hashtag) => tweet_hashtag.hashtag) + tweet_hashtags: TweetHashtag[]; } diff --git a/src/tweets/entities/tweet-hashtag.entity.ts b/src/tweets/entities/tweet-hashtag.entity.ts new file mode 100644 index 00000000..ca51240b --- /dev/null +++ b/src/tweets/entities/tweet-hashtag.entity.ts @@ -0,0 +1,26 @@ +import { Column, Entity, ForeignKey, JoinColumn, ManyToOne, PrimaryColumn } from 'typeorm'; +import { Tweet } from './tweet.entity'; +import { Hashtag } from './hashtags.entity'; + +@Entity('tweet_hashtags') +export class TweetHashtag { + @PrimaryColumn('uuid') + tweet_id: string; + + @PrimaryColumn('varchar') + hashtag_name: string; + @Column({ type: 'timestamptz', default: () => 'CURRENT_TIMESTAMP' }) + tweet_created_at: Date; + + @ManyToOne(() => Tweet, (tweet) => tweet.tweet_hashtags, { + onDelete: 'CASCADE', + }) + @JoinColumn({ name: 'tweet_id' }) + tweet: Tweet; + + @ManyToOne(() => Hashtag, (hashtag) => hashtag.tweet_hashtags, { + onDelete: 'CASCADE', + }) + @JoinColumn({ name: 'hashtag_name' }) + hashtag: Hashtag; +} diff --git a/src/tweets/entities/tweet-repost.entity.ts b/src/tweets/entities/tweet-repost.entity.ts index b2392b7e..83d3e946 100644 --- a/src/tweets/entities/tweet-repost.entity.ts +++ b/src/tweets/entities/tweet-repost.entity.ts @@ -1,13 +1,4 @@ -import { - Column, - CreateDateColumn, - Entity, - JoinColumn, - ManyToOne, - PrimaryColumn, - PrimaryGeneratedColumn, - Unique, -} from 'typeorm'; +import { CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryColumn } from 'typeorm'; import { Tweet } from './tweet.entity'; import { User } from '../../user/entities/user.entity'; import { UserFollows } from '../../user/entities/user-follows.entity'; diff --git a/src/tweets/entities/tweet-summary.entity.ts b/src/tweets/entities/tweet-summary.entity.ts index 84fba45d..67b99539 100644 --- a/src/tweets/entities/tweet-summary.entity.ts +++ b/src/tweets/entities/tweet-summary.entity.ts @@ -1,13 +1,4 @@ -import { - Column, - Entity, - Index, - JoinColumn, - ManyToOne, - OneToOne, - PrimaryColumn, - UpdateDateColumn, -} from 'typeorm'; +import { Column, Entity, JoinColumn, OneToOne, PrimaryColumn, UpdateDateColumn } from 'typeorm'; import { Tweet } from './tweet.entity'; @Entity('tweet_summaries') diff --git a/src/tweets/entities/tweet.entity.ts b/src/tweets/entities/tweet.entity.ts index 6946ebcd..05c40b9f 100644 --- a/src/tweets/entities/tweet.entity.ts +++ b/src/tweets/entities/tweet.entity.ts @@ -17,6 +17,7 @@ import { TweetQuote } from './tweet-quote.entity'; import { TweetRepost } from './tweet-repost.entity'; import { TweetReply } from './tweet-reply.entity'; import { TweetBookmark } from './tweet-bookmark.entity'; +import { TweetHashtag } from './tweet-hashtag.entity'; import { UserFollows } from '../../user/entities/user-follows.entity'; import { TweetType } from '../../shared/enums/tweet-types.enum'; import { TweetSummary } from './tweet-summary.entity'; @@ -62,6 +63,9 @@ export class Tweet { @Column({ name: 'num_bookmarks', type: 'int', default: 0 }) num_bookmarks: number; + @Column({ name: 'mentions', array: true, type: 'text', default: () => "'{}'" }) + mentions: string[]; + @CreateDateColumn({ type: 'timestamptz' }) created_at: Date; @@ -101,6 +105,9 @@ export class Tweet { @OneToOne(() => TweetSummary, (summary) => summary.tweet, { onDelete: 'CASCADE' }) summary: TweetSummary; + @OneToMany(() => TweetHashtag, (tweet_hashtag) => tweet_hashtag.tweet) + tweet_hashtags: TweetHashtag[]; + // Virtual fields for current user interactions (loaded via leftJoinAndMapOne in queries) current_user_like?: TweetLike | null; current_user_repost?: TweetRepost | null; diff --git a/src/tweets/entities/user-posts-view.entity.ts b/src/tweets/entities/user-posts-view.entity.ts index f5337b63..651d9233 100644 --- a/src/tweets/entities/user-posts-view.entity.ts +++ b/src/tweets/entities/user-posts-view.entity.ts @@ -6,6 +6,7 @@ import { UserFollows } from '../../user/entities/user-follows.entity'; @ViewEntity({ name: 'user_posts_view', + materialized: false, expression: ` SELECT t.tweet_id::text AS id, @@ -13,7 +14,7 @@ import { UserFollows } from '../../user/entities/user-follows.entity'; t.user_id AS tweet_author_id, t.tweet_id, NULL::uuid AS repost_id, - 'tweet' AS post_type, + t.type::text AS post_type, t.created_at AS post_date, t.type::text AS type, t.content, @@ -24,6 +25,8 @@ import { UserFollows } from '../../user/entities/user-follows.entity'; t.num_views, t.num_quotes, t.num_replies, + t.num_bookmarks, + t.mentions, t.created_at, t.updated_at, u.username, @@ -35,12 +38,18 @@ import { UserFollows } from '../../user/entities/user-follows.entity'; u.verified, u.bio, NULL::text AS reposted_by_name, + NULL::text AS reposted_by_username, COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, - trep.conversation_id AS conversation_id + trep.conversation_id AS conversation_id, + conv_tweet.user_id AS conversation_user_id, + COALESCE(orig_quote_tweet.user_id, orig_reply_tweet.user_id) AS parent_user_id FROM tweets t INNER JOIN "user" u ON t.user_id = u.id LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + LEFT JOIN tweets conv_tweet ON trep.conversation_id = conv_tweet.tweet_id + LEFT JOIN tweets orig_quote_tweet ON tq.original_tweet_id = orig_quote_tweet.tweet_id + LEFT JOIN tweets orig_reply_tweet ON trep.original_tweet_id = orig_reply_tweet.tweet_id UNION ALL @@ -61,6 +70,8 @@ import { UserFollows } from '../../user/entities/user-follows.entity'; t.num_views, t.num_quotes, t.num_replies, + t.num_bookmarks, + t.mentions, t.created_at, t.updated_at, u.username, @@ -72,8 +83,11 @@ import { UserFollows } from '../../user/entities/user-follows.entity'; u.verified, u.bio, reposter.name AS reposted_by_name, + reposter.username AS reposted_by_username, COALESCE(tq.original_tweet_id, trep.original_tweet_id) AS parent_id, - trep.conversation_id AS conversation_id + trep.conversation_id AS conversation_id, + conv_tweet.user_id AS conversation_user_id, + COALESCE(orig_quote_tweet.user_id, orig_reply_tweet.user_id) AS parent_user_id FROM tweet_reposts tr INNER JOIN tweets t ON tr.tweet_id = t.tweet_id @@ -81,6 +95,9 @@ import { UserFollows } from '../../user/entities/user-follows.entity'; INNER JOIN "user" reposter ON tr.user_id = reposter.id LEFT JOIN tweet_quotes tq ON t.tweet_id = tq.quote_tweet_id LEFT JOIN tweet_replies trep ON t.tweet_id = trep.reply_tweet_id + LEFT JOIN tweets conv_tweet ON trep.conversation_id = conv_tweet.tweet_id + LEFT JOIN tweets orig_quote_tweet ON tq.original_tweet_id = orig_quote_tweet.tweet_id + LEFT JOIN tweets orig_reply_tweet ON trep.original_tweet_id = orig_reply_tweet.tweet_id `, }) export class UserPostsView { @@ -132,6 +149,12 @@ export class UserPostsView { @ViewColumn() num_replies: number; + @ViewColumn() + num_bookmarks: number; + + @ViewColumn() + mentions: string[]; + @ViewColumn() created_at: Date; @@ -165,12 +188,21 @@ export class UserPostsView { @ViewColumn() reposted_by_name: string | null; + @ViewColumn() + reposted_by_username: string | null; + @ViewColumn() parent_id: string | null; @ViewColumn() conversation_id: string | null; + @ViewColumn() + conversation_user_id: string | null; + + @ViewColumn() + parent_user_id: string | null; + // Virtual relations for joins (tweet author) @ManyToOne(() => User) @JoinColumn({ name: 'tweet_author_id' }) diff --git a/src/tweets/queries/get-following-tweets.query.ts b/src/tweets/queries/get-following-tweets.query.ts deleted file mode 100644 index 6702e15b..00000000 --- a/src/tweets/queries/get-following-tweets.query.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { TweetResponseDTO } from '../dto/tweet-response.dto'; - -export function getFollowingTweetsQuery(cursor_condition: string, limit: number = 10): string { - return ` SELECT - post.*, - json_build_object( - 'id', u.id, - 'username', u.username, - 'name', u.name, - 'avatar_url', u.avatar_url, - 'verified', u.verified, - 'bio', u.bio, - 'cover_url', u.cover_url, - 'followers', u.followers, - 'following', u.following - ) as user, - CASE - WHEN post.post_type = 'repost' THEN json_build_object( - 'id', reposted_by.id, - 'name', reposted_by.name - ) - ELSE NULL - END as reposted_by_user, - --TODO: It cannot be null? - COALESCE(post.type, 'tweet') as tweet_type, - - -- Get parent_id if it is a quote or reply - -- Get parent data if it is a quote - -- TODO: Reply - - CASE WHEN likes.user_id IS NOT NULL THEN TRUE ELSE FALSE END as is_liked, - CASE WHEN reposts.user_id IS NOT NULL THEN TRUE ELSE FALSE END as is_reposted, - CASE WHEN follows.follower_id IS NOT NULL THEN TRUE ELSE FALSE END as is_following - - - FROM user_posts_view post - LEFT JOIN "user" u ON u.id = post.tweet_author_id - LEFT JOIN "user" reposted_by - ON reposted_by.id = post.profile_user_id - AND post.post_type = 'repost' - - LEFT JOIN tweet_likes likes - ON likes.tweet_id = post.tweet_id - AND likes.user_id = $1 - - LEFT JOIN tweet_reposts reposts - ON reposts.tweet_id = post.tweet_id - AND reposts.user_id = $1 - - LEFT JOIN user_follows follows - ON follows.follower_id = $1 - AND follows.followed_id = post.tweet_author_id - - WHERE ( - post.tweet_author_id = $1 - OR post.tweet_author_id IN ( - SELECT followed_id FROM user_follows WHERE follower_id = $1 - ) - OR post.profile_user_id = $1 - OR post.profile_user_id IN ( - SELECT followed_id FROM user_follows WHERE follower_id = $1 - ) - ) - AND post.tweet_author_id NOT IN ( - SELECT muted_id FROM user_mutes WHERE muter_id = $1 - ) - AND post.profile_user_id NOT IN ( - SELECT muted_id FROM user_mutes WHERE muter_id = $1 - ) - ${cursor_condition} - ORDER BY post.created_at - LIMIT ${limit} - - `; -} diff --git a/src/tweets/queries/get-foryou-tweets.query.ts b/src/tweets/queries/get-foryou-tweets.query.ts deleted file mode 100644 index c6e8c877..00000000 --- a/src/tweets/queries/get-foryou-tweets.query.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { TweetResponseDTO } from '../dto/tweet-response.dto'; - -export function getForyouTweetsQuery(cursor_condition: string, limit: number = 20): string { - return ` SELECT - post.*, - json_build_object( - 'id', u.id, - 'username', u.username, - 'name', u.name, - 'avatar_url', u.avatar_url, - 'verified', u.verified, - 'bio', u.bio, - 'cover_url', u.cover_url, - 'followers', u.followers, - 'following', u.following - ) as user, - CASE - WHEN post.post_type = 'repost' THEN json_build_object( - 'id', reposted_by.id, - 'name', reposted_by.name - ) - ELSE NULL - END as reposted_by_user, - --TODO: It cannot be null? - COALESCE(post.type, 'tweet') as tweet_type, - - -- Get parent_id if it is a quote or reply - -- Get parent data if it is a quote - -- TODO: Reply - - CASE WHEN likes.user_id IS NOT NULL THEN TRUE ELSE FALSE END as is_liked, - CASE WHEN reposts.user_id IS NOT NULL THEN TRUE ELSE FALSE END as is_reposted, - CASE WHEN follows.follower_id IS NOT NULL THEN TRUE ELSE FALSE END as is_following - - - FROM user_posts_view post - LEFT JOIN "user" u ON u.id = post.tweet_author_id - LEFT JOIN "user" reposted_by - ON reposted_by.id = post.profile_user_id - AND post.post_type = 'repost' - - LEFT JOIN tweet_likes likes - ON likes.tweet_id = post.tweet_id - AND likes.user_id = $1 - - LEFT JOIN tweet_reposts reposts - ON reposts.tweet_id = post.tweet_id - AND reposts.user_id = $1 - - LEFT JOIN user_follows follows - ON follows.follower_id = $1 - AND follows.followed_id = post.tweet_author_id - - WHERE post.tweet_author_id NOT IN ( - SELECT muted_id FROM user_mutes WHERE muter_id = $1 - ) - AND post.profile_user_id NOT IN ( - SELECT muted_id FROM user_mutes WHERE muter_id = $1 - ) - ${cursor_condition} - ORDER BY RANDOM() - LIMIT ${limit} - - `; -} diff --git a/src/tweets/queries/get-posts-profile-view.query.ts b/src/tweets/queries/get-posts-profile-view.query.ts new file mode 100644 index 00000000..2b96806d --- /dev/null +++ b/src/tweets/queries/get-posts-profile-view.query.ts @@ -0,0 +1,81 @@ +import { SelectQueryBuilder } from 'typeorm'; + +export function getPostsByUserIdProfileQuery( + query: SelectQueryBuilder, + user_id: string +): SelectQueryBuilder { + return query + .select([ + 'tweet.tweet_id AS tweet_id', + 'tweet.profile_user_id AS profile_user_id', + 'tweet.tweet_author_id AS tweet_author_id', + 'tweet.repost_id AS repost_id', + 'tweet.post_type AS post_type', + 'tweet.type AS type', + 'tweet.content AS content', + 'tweet.images AS images', + 'tweet.videos AS videos', + 'tweet.num_likes AS num_likes', + 'tweet.num_reposts AS num_reposts', + 'tweet.num_views AS num_views', + 'tweet.num_bookmarks AS num_bookmarks', + 'tweet.num_quotes AS num_quotes', + 'tweet.num_replies AS num_replies', + 'tweet.created_at AS created_at', + 'tweet.post_date AS post_date', + 'tweet.updated_at AS updated_at', + 'tweet.mentions AS mentions', + `json_build_object( + 'id', tweet.tweet_author_id, + 'username', tweet.username, + 'name', tweet.name, + 'avatar_url', tweet.avatar_url, + 'cover_url', tweet.cover_url, + 'verified', tweet.verified, + 'bio', tweet.bio, + 'followers', tweet.followers, + 'following', tweet.following + ) AS user`, + ]) + .where('tweet.profile_user_id = :user_id', { user_id }); +} + +export function getPostsByUserIdProfileQueryWithoutView( + query: SelectQueryBuilder, + user_id: string +): SelectQueryBuilder { + return query.select([ + 'tweet.tweet_id AS tweet_id', + 'tweet.profile_user_id AS profile_user_id', + 'tweet.tweet_author_id AS tweet_author_id', + 'tweet.repost_id AS repost_id', + 'tweet.post_type AS post_type', + 'tweet.type AS type', + 'tweet.content AS content', + 'tweet.type AS type', + 'tweet.post_date AS post_date', + 'tweet.images AS images', + 'tweet.videos AS videos', + 'tweet.num_likes AS num_likes', + 'tweet.num_reposts AS num_reposts', + 'tweet.num_views AS num_views', + 'tweet.num_quotes AS num_quotes', + 'tweet.num_replies AS num_replies', + 'tweet.num_bookmarks AS num_bookmarks', + 'tweet.created_at AS created_at', + 'tweet.updated_at AS updated_at', + 'tweet.mentions AS mentions', + 'like.created_at AS liked_at', + `json_build_object( + 'id', tweet.tweet_author_id, + 'username', tweet.username, + 'name', tweet.name, + 'avatar_url', tweet.avatar_url, + 'cover_url', tweet.cover_url, + 'verified', tweet.verified, + 'bio', tweet.bio, + 'followers', tweet.followers, + 'following', tweet.following + ) AS user`, + ]); +} diff --git a/src/tweets/queries/reply-parent-chain.query.ts b/src/tweets/queries/reply-parent-chain.query.ts index 7603639f..46a440d3 100644 --- a/src/tweets/queries/reply-parent-chain.query.ts +++ b/src/tweets/queries/reply-parent-chain.query.ts @@ -8,11 +8,13 @@ export function getReplyWithParentChainQuery(current_user_id?: string) { t.content, t.images, t.videos, + t.num_bookmarks, t.num_likes, t.num_reposts, t.num_views, t.num_quotes, t.num_replies, + t.mentions, t.created_at, t.updated_at, COALESCE(tr.original_tweet_id, tq.original_tweet_id) as parent_tweet_id, @@ -32,10 +34,12 @@ export function getReplyWithParentChainQuery(current_user_id?: string) { t.images, t.videos, t.num_likes, + t.num_bookmarks, t.num_reposts, t.num_views, t.num_quotes, t.num_replies, + t.mentions, t.created_at, t.updated_at, COALESCE(tr.original_tweet_id, tq.original_tweet_id) as parent_tweet_id, diff --git a/src/tweets/queries/tweet-fields-select.query.ts b/src/tweets/queries/tweet-fields-select.query.ts index 886b723e..e74841e7 100644 --- a/src/tweets/queries/tweet-fields-select.query.ts +++ b/src/tweets/queries/tweet-fields-select.query.ts @@ -11,6 +11,7 @@ export const tweet_fields_slect = [ 'tweet.num_quotes', 'tweet.num_replies', 'tweet.num_bookmarks', + 'tweet.mentions', 'tweet.created_at', 'tweet.updated_at', 'user.id', @@ -23,52 +24,3 @@ export const tweet_fields_slect = [ 'user.followers', 'user.following', ]; - -// if (current_user_id) { -// query -// .leftJoinAndMapOne( -// 'quote_tweet.current_user_like', -// TweetLike, -// 'current_user_like', -// 'current_user_like.tweet_id = quote_tweet.tweet_id AND current_user_like.user_id = :current_user_id', -// { current_user_id } -// ) -// .leftJoinAndMapOne( -// 'quote_tweet.current_user_repost', -// TweetRepost, -// 'current_user_repost', -// 'current_user_repost.tweet_id = quote_tweet.tweet_id AND current_user_repost.user_id = :current_user_id', -// { current_user_id } -// ) -// .leftJoinAndMapOne( -// 'user.current_user_follows', -// UserFollows, -// 'current_user_follows', -// 'current_user_follows.follower_id = :current_user_id AND current_user_follows.followed_id = user.id', -// { current_user_id } -// ); -// } -// if (current_user_id) { -// query -// .leftJoinAndMapOne( -// 'tweet.current_user_like', -// TweetLike, -// 'current_user_like', -// 'current_user_like.tweet_id = tweet.tweet_id AND current_user_like.user_id = :current_user_id', -// { current_user_id } -// ) -// .leftJoinAndMapOne( -// 'tweet.current_user_repost', -// TweetRepost, -// 'current_user_repost', -// 'current_user_repost.tweet_id = tweet.tweet_id AND current_user_repost.user_id = :current_user_id', -// { current_user_id } -// ) -// .leftJoinAndMapOne( -// 'user.current_user_follows', -// UserFollows, -// 'current_user_follows', -// 'current_user_follows.follower_id = :current_user_id AND current_user_follows.followed_id = user.id', -// { current_user_id } -// ); -// } diff --git a/src/tweets/tweets.controller.spec.ts b/src/tweets/tweets.controller.spec.ts index 01569f69..5ced533a 100644 --- a/src/tweets/tweets.controller.spec.ts +++ b/src/tweets/tweets.controller.spec.ts @@ -70,17 +70,6 @@ describe('TweetsController', () => { }); }); - describe('getAllTweets', () => { - it('should return undefined (method not implemented)', async () => { - const query_dto = { page: 1, limit: 20 }; - const user_id = 'user-123'; - - const result = await controller.getAllTweets(query_dto as any, user_id); - - expect(result).toBeUndefined(); - }); - }); - describe('getTweetById', () => { it('should return a tweet by id', async () => { const tweet_id = 'tweet-123'; @@ -334,21 +323,6 @@ describe('TweetsController', () => { }); }); - describe('trackTweetView', () => { - it('should track tweet view', async () => { - const tweet_id = 'tweet-123'; - const user_id = 'user-123'; - const mock_response = { success: true }; - - mock_tweets_service.incrementTweetViews.mockResolvedValue(mock_response); - - const result = await controller.trackTweetView(tweet_id, user_id); - - expect(service.incrementTweetViews).toHaveBeenCalledWith(tweet_id); - expect(result).toEqual(mock_response); - }); - }); - describe('bookmarkTweet', () => { it('should bookmark a tweet', async () => { const tweet_id = 'tweet-456'; diff --git a/src/tweets/tweets.controller.ts b/src/tweets/tweets.controller.ts index 4b7a7208..a20fd5d7 100644 --- a/src/tweets/tweets.controller.ts +++ b/src/tweets/tweets.controller.ts @@ -29,16 +29,9 @@ import { } from '@nestjs/swagger'; import { CreateTweetDTO } from './dto/create-tweet.dto'; import { UpdateTweetDTO } from './dto/update-tweet.dto'; -import { UpdateTweetWithQuoteDTO } from './dto/update-tweet-with-quote.dto'; -import { GetTweetsQueryDto } from './dto/get-tweets-query.dto'; -import { GetTweetLikesQueryDto } from './dto/get-tweet-likes-query.dto'; -import { GetTweetRepostsQueryDto } from './dto/get-tweet-reposts-query.dto'; import { GetTweetRepliesQueryDto } from './dto/get-tweet-replies-query.dto'; -import { UploadMediaResponseDTO } from './dto/upload-media.dto'; -import { PaginatedTweetsResponseDTO } from './dto/paginated-tweets-response.dto'; import { PaginatedTweetLikesResponseDTO } from './dto/paginated-tweet-likes-response.dto'; import { PaginatedTweetRepostsResponseDTO } from './dto/paginated-tweet-reposts-response.dto'; -import { PaginatedTweetRepliesResponseDTO } from './dto/paginated-tweet-replies-response.dto'; import { PaginatedBookmarksResponseDTO } from './dto/paginated-bookmarks-response.dto'; import { TweetResponseDTO } from './dto/tweet-response.dto'; import { TweetsService } from './tweets.service'; @@ -58,7 +51,6 @@ import { create_tweet_swagger, delete_repost_swagger, delete_tweet_swagger, - get_all_tweets_swagger, get_tweet_by_id_swagger, get_tweet_likes_swagger, get_tweet_quotes_swagger, @@ -70,10 +62,8 @@ import { quote_tweet_swagger, reply_to_tweet_swagger, repost_tweet_swagger, - track_tweet_view_swagger, unbookmark_tweet_swagger, unlike_tweet_swagger, - update_quote_tweet_swagger, update_tweet_swagger, upload_image_swagger, upload_video_swagger, @@ -107,20 +97,6 @@ export class TweetsController { } } - @ApiOperation(get_all_tweets_swagger.operation) - @ApiOkResponse({ - description: 'Tweets retrieved successfully with pagination metadata', - type: PaginatedTweetsResponseDTO, - }) - @ApiUnauthorizedErrorResponse(ERROR_MESSAGES.INVALID_OR_EXPIRED_TOKEN) - @ApiInternalServerError(ERROR_MESSAGES.INTERNAL_SERVER_ERROR) - @ResponseMessage(SUCCESS_MESSAGES.TWEETS_RETRIEVED) - @Get() - async getAllTweets(@Query() query: GetTweetsQueryDto, @GetUserId() user_id?: string) { - // return await this.tweets_service.getAllTweets(query, user_id); - return; - } - @HttpCode(HttpStatus.OK) @ApiOperation(get_tweet_summary_swagger.operation) @ApiParam(get_tweet_summary_swagger.param) @@ -446,24 +422,6 @@ export class TweetsController { return await this.tweets_service.getTweetReplies(id, user_id, query); } - @ApiOperation(update_quote_tweet_swagger.operation) - @ApiOperation(update_quote_tweet_swagger.operation) - @ApiParam(update_quote_tweet_swagger.param) - @ApiBody({ type: UpdateTweetWithQuoteDTO }) - @ApiOkResponse(update_quote_tweet_swagger.responses.success) - @ApiUnauthorizedErrorResponse(ERROR_MESSAGES.INVALID_OR_EXPIRED_TOKEN) - @ApiForbiddenErrorResponse(ERROR_MESSAGES.USER_NOT_FOUND) - @ApiNotFoundErrorResponse(ERROR_MESSAGES.USER_NOT_FOUND) - @ApiInternalServerError(ERROR_MESSAGES.FAILED_TO_UPDATE_IN_DB) - @ResponseMessage(SUCCESS_MESSAGES.QUOTE_TWEET_UPDATED) - @UseGuards(JwtAuthGuard) - @Patch(':id/quote') - async updateQuoteTweet( - @Param('id', ParseUUIDPipe) id: string, - @Body() update_quote_dto: UpdateTweetWithQuoteDTO, - @GetUserId() user_id: string - ) {} - @HttpCode(HttpStatus.CREATED) @ApiOperation(upload_image_swagger.operation) @ApiConsumes('multipart/form-data') @@ -503,80 +461,4 @@ export class TweetsController { return this.tweets_service.uploadVideo(file); } - - @HttpCode(HttpStatus.OK) - @ApiOperation(track_tweet_view_swagger.operation) - @ApiParam(track_tweet_view_swagger.param) - @ApiOkResponse(track_tweet_view_swagger.responses.success) - @ApiUnauthorizedErrorResponse(ERROR_MESSAGES.INVALID_OR_EXPIRED_TOKEN) - @ApiNotFoundErrorResponse(ERROR_MESSAGES.TWEET_NOT_FOUND) - @ApiInternalServerError(ERROR_MESSAGES.INTERNAL_SERVER_ERROR) - @ResponseMessage(SUCCESS_MESSAGES.TWEET_VIEW_TRACKED) - @UseGuards(OptionalJwtAuthGuard) - @Post(':id/view') - async trackTweetView(@Param('id', ParseUUIDPipe) id: string, @GetUserId() user_id: string) { - return await this.tweets_service.incrementTweetViews(id); - } - - /* Test Profile Functionalities */ - - // @HttpCode(HttpStatus.OK) - // @ApiOperation({ summary: 'Test: Get replies by user ID' }) - // @ApiQuery({ name: 'cursor', required: false, type: String }) - // @ApiQuery({ name: 'limit', required: false, type: Number }) - // @ResponseMessage('User replies retrieved successfully') - // @Get('test/user/:user_id/replies') - // async testGetRepliesByUserId( - // @Param('user_id', ParseUUIDPipe) user_id: string, - // @Query('cursor') cursor?: string, - // @Query('limit') limit?: number, - // @GetUserId() current_user_id?: string - // ) { - // return await this.tweets_service.getRepliesByUserId( - // user_id, - // current_user_id, - // cursor, - // limit ? Number(limit) : 10 - // ); - // } - - // @HttpCode(HttpStatus.OK) - // @ApiOperation({ summary: 'Test: Get media posts by user ID' }) - // @ApiQuery({ name: 'cursor', required: false, type: String }) - // @ApiQuery({ name: 'limit', required: false, type: Number }) - // @ResponseMessage('User media posts retrieved successfully') - // @Get('test/user/:user_id/media') - // async testGetMediaByUserId( - // @Param('user_id', ParseUUIDPipe) user_id: string, - // @Query('cursor') cursor?: string, - // @Query('limit') limit?: number, - // @GetUserId() current_user_id?: string - // ) { - // return await this.tweets_service.getMediaByUserId( - // user_id, - // current_user_id, - // cursor, - // limit ? Number(limit) : 10 - // ); - // } - - // @HttpCode(HttpStatus.OK) - // @ApiOperation({ summary: 'Test: Get liked posts by user ID' }) - // @ApiQuery({ name: 'cursor', required: false, type: String }) - // @ApiQuery({ name: 'limit', required: false, type: Number }) - // @ResponseMessage('User liked posts retrieved successfully') - // @Get('test/user/:user_id/likes') - // async testGetLikedPostsByUserId( - // @Param('user_id', ParseUUIDPipe) user_id: string, - // @Query('cursor') cursor?: string, - // @Query('limit') limit?: number, - // @GetUserId() current_user_id?: string - // ) { - // return await this.tweets_service.getLikedPostsByUserId( - // user_id, - // current_user_id, - // cursor, - // limit ? Number(limit) : 10 - // ); - // } } diff --git a/src/tweets/tweets.module.ts b/src/tweets/tweets.module.ts index 5fb35496..39a2a005 100644 --- a/src/tweets/tweets.module.ts +++ b/src/tweets/tweets.module.ts @@ -1,10 +1,11 @@ -import { Module } from '@nestjs/common'; +import { forwardRef, Module } from '@nestjs/common'; import { TypeOrmModule } from '@nestjs/typeorm'; import { TweetsController } from './tweets.controller'; import { TweetsService } from './tweets.service'; import { TweetsRepository } from './tweets.repository'; import { Tweet, TweetLike, TweetQuote, TweetReply, TweetRepost } from './entities'; import { TweetBookmark } from './entities/tweet-bookmark.entity'; +import { TweetHashtag } from './entities/tweet-hashtag.entity'; import { Hashtag } from './entities/hashtags.entity'; import { UserFollows } from 'src/user/entities/user-follows.entity'; import { PaginationService } from 'src/shared/services/pagination/pagination.service'; @@ -13,9 +14,9 @@ import { UserPostsView } from './entities/user-posts-view.entity'; import { TweetCategory } from './entities/tweet-category.entity'; import { TweetSummary } from './entities/tweet-summary.entity'; import { BackgroundJobsModule } from 'src/background-jobs'; -import { ReplyJobService } from 'src/background-jobs/notifications/reply/reply.service'; -import { TrendService } from 'src/trend/trend.service'; import { HashtagJobService } from 'src/background-jobs/hashtag/hashtag.service'; +import { User } from 'src/user/entities'; +import { DeletedTweetsCleanupService, DeletedTweetsLog } from './deleted-tweets-cleanup.service'; @Module({ imports: [ @@ -26,11 +27,14 @@ import { HashtagJobService } from 'src/background-jobs/hashtag/hashtag.service'; TweetQuote, TweetReply, TweetBookmark, + TweetHashtag, Hashtag, UserFollows, UserPostsView, TweetCategory, TweetSummary, + User, + DeletedTweetsLog, ]), BackgroundJobsModule, ], @@ -41,6 +45,7 @@ import { HashtagJobService } from 'src/background-jobs/hashtag/hashtag.service'; PaginationService, AzureStorageService, HashtagJobService, + DeletedTweetsCleanupService, ], exports: [TweetsService, TweetsRepository], }) diff --git a/src/tweets/tweets.repository.spec.ts b/src/tweets/tweets.repository.spec.ts index 21d33a95..3e032350 100644 --- a/src/tweets/tweets.repository.spec.ts +++ b/src/tweets/tweets.repository.spec.ts @@ -79,6 +79,7 @@ describe('TweetsRepository', () => { createEntityManager: jest.fn(() => ({ createQueryBuilder: jest.fn(() => MOCK_QUERY_BUILDER), })), + query: jest.fn(), }; const MOCK_TWEET_REPOSITORY = { @@ -147,8 +148,10 @@ describe('TweetsRepository', () => { ); pagination_service = module.get(PaginationService); data_source = module.get(DataSource); + }); - // Reset all mocks + beforeEach(() => { + // Clear all mocks before each test jest.clearAllMocks(); // Restore mock implementations after clearAllMocks @@ -158,6 +161,25 @@ describe('TweetsRepository', () => { MOCK_TWEET_CATEGORY_REPOSITORY.createQueryBuilder.mockReturnValue(MOCK_QUERY_BUILDER); MOCK_DATA_SOURCE.createQueryBuilder.mockReturnValue(MOCK_QUERY_BUILDER); + // Restore MOCK_QUERY_BUILDER chain methods + MOCK_QUERY_BUILDER.leftJoinAndSelect.mockReturnThis(); + MOCK_QUERY_BUILDER.leftJoin.mockReturnThis(); + MOCK_QUERY_BUILDER.innerJoin.mockReturnThis(); + MOCK_QUERY_BUILDER.innerJoinAndSelect.mockReturnThis(); + MOCK_QUERY_BUILDER.leftJoinAndMapOne.mockReturnThis(); + MOCK_QUERY_BUILDER.select.mockReturnThis(); + MOCK_QUERY_BUILDER.addSelect.mockReturnThis(); + MOCK_QUERY_BUILDER.where.mockReturnThis(); + MOCK_QUERY_BUILDER.andWhere.mockReturnThis(); + MOCK_QUERY_BUILDER.orderBy.mockReturnThis(); + MOCK_QUERY_BUILDER.addOrderBy.mockReturnThis(); + MOCK_QUERY_BUILDER.limit.mockReturnThis(); + MOCK_QUERY_BUILDER.take.mockReturnThis(); + MOCK_QUERY_BUILDER.setParameter.mockReturnThis(); + MOCK_QUERY_BUILDER.setParameters.mockReturnThis(); + MOCK_QUERY_BUILDER.addCommonTableExpression.mockReturnThis(); + MOCK_QUERY_BUILDER.from.mockReturnThis(); + // Mock repository helper methods to return the query builder jest.spyOn(repository as any, 'attachParentTweetQuery').mockImplementation((q) => q); jest.spyOn(repository as any, 'attachConversationTweetQuery').mockImplementation((q) => q); @@ -165,8 +187,6 @@ describe('TweetsRepository', () => { (q) => q ); jest.spyOn(repository as any, 'attachRepostInfo').mockImplementation((q) => q); - jest.spyOn(repository as any, 'attachRepliedTweetQuery').mockImplementation((q) => q); - jest.spyOn(repository as any, 'attachQuotedTweetQuery').mockImplementation((q) => q); jest.spyOn(repository as any, 'attachUserFollowFlags').mockImplementation( (tweets) => tweets ); @@ -321,64 +341,86 @@ describe('TweetsRepository', () => { expect(result.data[0].parent_tweet_id).toBe('parent123'); expect(result.data[0].tweet_id).toBe('reply1'); }); - }); - describe('getForyouTweets', () => { - it('should return random tweets for you feed', async () => { + it('should filter tweets by since_hours_ago parameter', async () => { const user_id = 'user123'; const cursor = undefined; const limit = 10; + const since_hours_ago = 24; const raw_results = [ create_mock_tweet_data({ tweet_id: 'tweet1', - type: 'tweet', - content: 'Random tweet', - user: { - id: 'user1', - username: 'randomuser', - name: 'Random User', - avatar_url: null, - cover_url: null, - verified: false, - bio: null, - followers: 0, - following: 0, - }, - num_likes: 10, - num_reposts: 5, - num_quotes: 2, - num_replies: 3, - num_views: 50, - created_at: new Date('2024-01-01'), - updated_at: new Date('2024-01-01'), + content: 'Recent tweet', }), ]; MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue(raw_results); - const result = await repository.getForyouTweets(user_id, cursor, limit); + const result = await repository.getFollowingTweets( + user_id, + cursor, + limit, + since_hours_ago + ); expect(result.data).toHaveLength(1); - expect(result.data[0].content).toBe('Random tweet'); - expect(MOCK_QUERY_BUILDER.orderBy).toHaveBeenCalledWith('RANDOM()'); + expect(MOCK_QUERY_BUILDER.andWhere).toHaveBeenCalled(); }); - it('should handle cursor in for you feed', async () => { + it('should handle empty results', async () => { const user_id = 'user123'; - const cursor = '2024-01-01T00:00:00.000Z_tweet123'; - const limit = 10; MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue([]); + MOCK_PAGINATION_SERVICE.generateNextCursor.mockReturnValue(null); - await repository.getForyouTweets(user_id, cursor, limit); + const result = await repository.getFollowingTweets(user_id); - expect(MOCK_QUERY_BUILDER.andWhere).toHaveBeenCalled(); + expect(result.data).toHaveLength(0); + expect(result.pagination.has_more).toBe(false); + expect(result.pagination.next_cursor).toBeNull(); + }); + + it('should handle tweets with all interaction flags', async () => { + const user_id = 'user123'; + + const raw_results = [ + create_mock_tweet_data({ + tweet_id: 'tweet1', + is_liked: true, + is_reposted: true, + is_bookmarked: true, + }), + ]; + + MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue(raw_results); + // Ensure valid cursor gen for non-empty + MOCK_PAGINATION_SERVICE.generateNextCursor.mockReturnValue('next_cursor'); + + const result = await repository.getFollowingTweets(user_id); + + expect(result.data[0].is_liked).toBe(true); + expect(result.data[0].is_reposted).toBe(true); + expect(result.data[0].is_bookmarked).toBe(true); }); }); - describe('getReplies', () => { - // TODO: Implement tests for getReplies method + describe('getReplies - Edge Cases', () => { + it('should handle getTweetsByIds with error', async () => { + const tweet_ids = ['tweet-1']; + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + const console_log_spy = jest.spyOn(console, 'log').mockImplementation(); + + MOCK_QUERY_BUILDER.getMany.mockRejectedValue(new Error('Database connection failed')); + + // getTweetsByIds uses getRawMany internally + await expect(repository.getTweetsByIds(tweet_ids)).rejects.toThrow( + 'Database connection failed' + ); + + console_error_spy.mockRestore(); + console_log_spy.mockRestore(); + }); }); describe('getPostsByUserId', () => { @@ -452,10 +494,12 @@ describe('TweetsRepository', () => { it('should handle errors in getPostsByUserId', async () => { const user_id = 'user123'; const error = new Error('Database error'); + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); MOCK_QUERY_BUILDER.getRawMany.mockRejectedValue(error); await expect(repository.getPostsByUserId(user_id)).rejects.toThrow('Database error'); + console_error_spy.mockRestore(); }); it('should include reposted_by info for reposts', async () => { @@ -511,6 +555,35 @@ describe('TweetsRepository', () => { expect(result.data[0].reposted_by?.id).toBe(user_id); expect(result.data[0].reposted_by?.name).toBe('Reposter User'); }); + + it('should handle large limit values', async () => { + const user_id = 'user123'; + const limit = 100; + + MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue([]); + + const result = await repository.getPostsByUserId(user_id, undefined, undefined, limit); + + expect(MOCK_QUERY_BUILDER.limit).toHaveBeenCalledWith(limit); + }); + + it('should handle posts with no current_user_id', async () => { + const user_id = 'user123'; + + const mock_posts = [ + create_mock_tweet_data({ + tweet_id: 'tweet1', + is_liked: false, + is_reposted: false, + }), + ]; + + MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue(mock_posts); + + const result = await repository.getPostsByUserId(user_id); + + expect(result.data).toHaveLength(1); + }); }); describe('getRepliesByUserId', () => { @@ -548,10 +621,35 @@ describe('TweetsRepository', () => { it('should handle errors in getRepliesByUserId', async () => { const user_id = 'user123'; const error = new Error('Query failed'); + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); MOCK_QUERY_BUILDER.getRawMany.mockRejectedValue(error); await expect(repository.getRepliesByUserId(user_id)).rejects.toThrow('Query failed'); + console_error_spy.mockRestore(); + }); + + it('should handle empty replies', async () => { + const user_id = 'user123'; + + MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue([]); + MOCK_PAGINATION_SERVICE.generateNextCursor.mockReturnValue(null); + + const result = await repository.getRepliesByUserId(user_id); + + expect(result.data).toHaveLength(0); + expect(result.pagination.has_more).toBe(false); + }); + + it('should handle replies with different limits', async () => { + const user_id = 'user123'; + const limit = 50; + + MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue([]); + + await repository.getRepliesByUserId(user_id, undefined, undefined, limit); + + expect(MOCK_QUERY_BUILDER.limit).toHaveBeenCalledWith(limit); }); }); @@ -620,12 +718,43 @@ describe('TweetsRepository', () => { it('should handle errors in getMediaByUserId', async () => { const user_id = 'user123'; const error = new Error('Media query failed'); + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); MOCK_QUERY_BUILDER.getRawMany.mockRejectedValue(error); await expect(repository.getMediaByUserId(user_id)).rejects.toThrow( 'Media query failed' ); + console_error_spy.mockRestore(); + }); + + it('should handle media tweets with videos', async () => { + const user_id = 'user123'; + + const mock_media = [ + create_mock_tweet_data({ + tweet_id: 'tweet1', + videos: ['video1.mp4'], + }), + ]; + + MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue(mock_media); + + const result = await repository.getMediaByUserId(user_id); + + expect(result.data).toHaveLength(1); + expect(result.data[0].videos).toContain('video1.mp4'); + }); + + it('should handle empty media results', async () => { + const user_id = 'user123'; + + MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue([]); + + const result = await repository.getMediaByUserId(user_id); + + expect(result.data).toHaveLength(0); + expect(result.pagination.has_more).toBe(false); }); }); @@ -687,12 +816,36 @@ describe('TweetsRepository', () => { it('should handle errors in getLikedPostsByUserId', async () => { const user_id = 'user123'; const error = new Error('Liked posts query failed'); + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); MOCK_QUERY_BUILDER.getRawMany.mockRejectedValue(error); await expect(repository.getLikedPostsByUserId(user_id)).rejects.toThrow( 'Liked posts query failed' ); + console_error_spy.mockRestore(); + }); + + it('should handle empty liked posts', async () => { + const user_id = 'user123'; + + MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue([]); + + const result = await repository.getLikedPostsByUserId(user_id); + + expect(result.data).toHaveLength(0); + expect(result.pagination.has_more).toBe(false); + }); + + it('should respect limit parameter', async () => { + const user_id = 'user123'; + const limit = 5; + + MOCK_QUERY_BUILDER.getRawMany.mockResolvedValue([]); + + await repository.getLikedPostsByUserId(user_id, undefined, limit); + + expect(MOCK_QUERY_BUILDER.limit).toHaveBeenCalledWith(limit); }); }); @@ -780,6 +933,7 @@ describe('TweetsRepository', () => { it('should handle errors in getReplyWithParentChain', async () => { const tweet_id = 'reply123'; const error = new Error('Chain query failed'); + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); MOCK_QUERY_RUNNER.query.mockRejectedValue(error); @@ -787,106 +941,7 @@ describe('TweetsRepository', () => { 'Chain query failed' ); expect(MOCK_QUERY_RUNNER.release).toHaveBeenCalled(); - }); - }); - - describe('getRecentTweetsByCategoryIds', () => { - it('should return recent tweets by category IDs', async () => { - const category_ids = ['cat1', 'cat2']; - const user_id = 'user123'; - const options = { limit: 10, since_hours_ago: 24 }; - - const mock_tweets = [ - { - tweet_id: 'tweet1', - content: 'Test tweet', - user: { id: 'other_user', username: 'other' }, - created_at: new Date(), - }, - ]; - - MOCK_QUERY_BUILDER.getMany.mockResolvedValue(mock_tweets); - jest.spyOn(repository, 'attachUserTweetInteractionFlags').mockReturnValue( - MOCK_QUERY_BUILDER as any - ); - - const result = await repository.getRecentTweetsByCategoryIds( - category_ids, - user_id, - options - ); - - expect(result).toBeDefined(); - expect(Array.isArray(result)).toBe(true); - expect(MOCK_TWEET_REPOSITORY.createQueryBuilder).toHaveBeenCalled(); - }); - - it('should use default options when not provided', async () => { - const category_ids = ['cat1']; - const user_id = 'user123'; - - MOCK_QUERY_BUILDER.getMany.mockResolvedValue([]); - jest.spyOn(repository, 'attachUserTweetInteractionFlags').mockReturnValue( - MOCK_QUERY_BUILDER as any - ); - - await repository.getRecentTweetsByCategoryIds(category_ids, user_id); - - expect(MOCK_QUERY_BUILDER.take).toHaveBeenCalledWith(350); // 300 + 50 buffer - }); - - it('should handle errors in getRecentTweetsByCategoryIds', async () => { - const category_ids = ['cat1']; - const user_id = 'user123'; - const error = new Error('Database error'); - - MOCK_QUERY_BUILDER.getMany.mockRejectedValue(error); - jest.spyOn(repository, 'attachUserTweetInteractionFlags').mockReturnValue( - MOCK_QUERY_BUILDER as any - ); - - await expect( - repository.getRecentTweetsByCategoryIds(category_ids, user_id) - ).rejects.toThrow('Database error'); - }); - }); - - describe('getTweetsCategories', () => { - it('should return categories for tweet IDs', async () => { - const tweet_ids = ['tweet1', 'tweet2']; - const mock_categories = [ - { tweet_id: 'tweet1', category_id: 1, percentage: 0.8 }, - { tweet_id: 'tweet1', category_id: 2, percentage: 0.2 }, - { tweet_id: 'tweet2', category_id: 3, percentage: 1.0 }, - ]; - - MOCK_QUERY_BUILDER.getMany.mockResolvedValue(mock_categories); - - const result = await repository.getTweetsCategories(tweet_ids); - - expect(result).toBeDefined(); - expect(MOCK_TWEET_CATEGORY_REPOSITORY.createQueryBuilder).toHaveBeenCalled(); - }); - - it('should return empty object when no categories found', async () => { - const tweet_ids = ['tweet1']; - - MOCK_QUERY_BUILDER.getMany.mockResolvedValue([]); - - // The current implementation has a bug with empty arrays (reduce without initial value) - // This test documents the bug - it should return {} but instead throws - await expect(repository.getTweetsCategories(tweet_ids)).rejects.toThrow( - 'Reduce of empty array with no initial value' - ); - }); - - it('should handle errors in getTweetsCategories', async () => { - const tweet_ids = ['tweet1']; - const error = new Error('Query error'); - - MOCK_QUERY_BUILDER.getMany.mockRejectedValue(error); - - await expect(repository.getTweetsCategories(tweet_ids)).rejects.toThrow('Query error'); + console_error_spy.mockRestore(); }); }); @@ -985,30 +1040,6 @@ describe('TweetsRepository', () => { 'Database connection failed' ); }); - - it('should handle errors in getForyouTweets', async () => { - const error = new Error('Random query failed'); - MOCK_QUERY_BUILDER.getRawMany.mockRejectedValue(error); - - await expect(repository.getForyouTweets('user123')).rejects.toThrow( - 'Random query failed' - ); - }); - }); - - describe('Helper Methods - attachQuotedTweetQuery', () => { - beforeEach(() => { - jest.spyOn(repository as any, 'attachQuotedTweetQuery').mockRestore(); - }); - - it('should attach quoted tweet query', () => { - const query = MOCK_QUERY_BUILDER as any; - - const result = (repository as any).attachQuotedTweetQuery(query); - - expect(result).toBe(query); - expect(query.addSelect).toHaveBeenCalled(); - }); }); describe('Helper Methods - attachRepostInfo', () => { @@ -1055,4 +1086,358 @@ describe('TweetsRepository', () => { expect(result).toBe(query); }); }); + + describe('getTweetsByIds with current_user_id', () => { + it('should get tweets by IDs with interaction flags when user_id provided', async () => { + const tweet_ids = ['tweet-1', 'tweet-2']; + const current_user_id = 'user-123'; + + const mock_tweets = [ + create_mock_tweet_data({ tweet_id: 'tweet-1' }), + create_mock_tweet_data({ tweet_id: 'tweet-2' }), + ]; + + (MOCK_TWEET_REPOSITORY.createQueryBuilder as jest.Mock).mockReturnValue( + MOCK_QUERY_BUILDER + ); + MOCK_QUERY_BUILDER.getMany.mockResolvedValue(mock_tweets); + jest.spyOn(repository as any, 'attachUserTweetInteractionFlags').mockReturnValue( + MOCK_QUERY_BUILDER + ); + jest.spyOn(repository as any, 'incrementTweetViewsAsync').mockResolvedValue(undefined); + + const result = await repository.getTweetsByIds(tweet_ids, current_user_id); + + expect(result).toHaveLength(2); + expect(repository['attachUserTweetInteractionFlags']).toHaveBeenCalledWith( + MOCK_QUERY_BUILDER, + current_user_id, + 'tweet' + ); + }); + + it('should return empty array when tweet_ids is empty', async () => { + const result = await repository.getTweetsByIds([]); + + expect(result).toEqual([]); + }); + }); + + describe('incrementTweetViewsAsync', () => { + it('should call database query with tweet IDs', async () => { + const tweet_ids = ['tweet-1', 'tweet-2']; + MOCK_DATA_SOURCE.query.mockResolvedValue(undefined); + + await (repository as any).incrementTweetViewsAsync(tweet_ids); + + expect(MOCK_DATA_SOURCE.query).toHaveBeenCalledWith( + 'SELECT increment_tweet_views_batch($1::uuid[])', + [tweet_ids] + ); + }); + + it('should return early for empty array', async () => { + await (repository as any).incrementTweetViewsAsync([]); + + expect(MOCK_DATA_SOURCE.query).not.toHaveBeenCalled(); + }); + + it('should handle errors gracefully', async () => { + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + MOCK_DATA_SOURCE.query.mockRejectedValue(new Error('DB Error')); + + await expect( + (repository as any).incrementTweetViewsAsync(['tweet-1']) + ).resolves.toBeUndefined(); + + console_error_spy.mockRestore(); + }); + }); + + describe('attachRepostInfo', () => { + it('should return the query builder', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachRepostInfo(query); + + expect(result).toBe(query); + }); + + it('should work with custom table alias', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachRepostInfo(query, 'custom_tweet'); + + expect(result).toBe(query); + }); + }); + + describe('attachParentTweetQuery', () => { + it('should return the query builder without user_id', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachParentTweetQuery(query); + + expect(result).toBe(query); + }); + + it('should return the query builder with user_id', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachParentTweetQuery(query, 'user-123'); + + expect(result).toBe(query); + }); + + it('should work with custom table alias', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachParentTweetQuery(query, 'user-123', 'custom'); + + expect(result).toBe(query); + }); + }); + + describe('attachConversationTweetQuery', () => { + it('should return the query builder without user_id', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachConversationTweetQuery(query); + + expect(result).toBe(query); + }); + + it('should return the query builder with user_id', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachConversationTweetQuery(query, 'user-123'); + + expect(result).toBe(query); + }); + + it('should work with custom table alias', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachConversationTweetQuery( + query, + 'user-123', + 'custom' + ); + + expect(result).toBe(query); + }); + }); + + describe('attachUserInteractionBooleanFlags', () => { + it('should return the query builder with user_id', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachUserInteractionBooleanFlags(query, 'user-123'); + + expect(result).toBe(query); + }); + + it('should return the query builder without user_id', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachUserInteractionBooleanFlags(query); + + expect(result).toBe(query); + }); + + it('should work with custom columns', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachUserInteractionBooleanFlags( + query, + 'user-123', + 'custom.user_id', + 'custom.tweet_id' + ); + + expect(result).toBe(query); + }); + }); + + describe('attachUserFollowFlags', () => { + beforeEach(() => { + jest.spyOn(repository as any, 'attachUserFollowFlags').mockRestore(); + }); + + it('should attach follow flags to tweets', () => { + const tweets = [ + { + tweet_id: 'tweet-1', + user: { id: 'user-1' }, + is_following: true, + is_follower: false, + }, + ]; + + const result = (repository as any).attachUserFollowFlags(tweets); + + expect(result).toBeDefined(); + expect(result[0].user.is_following).toBe(true); + expect(result[0].user.is_follower).toBe(false); + }); + + it('should handle tweets with parent_tweet', () => { + const tweets = [ + { + tweet_id: 'tweet-1', + user: { id: 'user-1' }, + is_following: false, + is_follower: false, + parent_tweet: { + user: { id: 'user-2' }, + is_following: true, + is_follower: true, + }, + }, + ]; + + const result = (repository as any).attachUserFollowFlags(tweets); + + expect(result[0].parent_tweet.user.is_following).toBe(true); + expect(result[0].parent_tweet.user.is_follower).toBe(true); + }); + + it('should handle tweets with conversation_tweet', () => { + const tweets = [ + { + tweet_id: 'tweet-1', + user: { id: 'user-1' }, + is_following: false, + is_follower: false, + conversation_tweet: { + user: { id: 'user-3' }, + is_following: false, + is_follower: true, + }, + }, + ]; + + const result = (repository as any).attachUserFollowFlags(tweets); + + expect(result[0].conversation_tweet.user.is_follower).toBe(true); + }); + + it('should handle empty tweets array', () => { + const result = (repository as any).attachUserFollowFlags([]); + + expect(result).toEqual([]); + }); + }); + + describe('attachParentTweetQuery with user_id (nested function coverage)', () => { + beforeEach(() => { + // Restore real implementation to test nested get_interactions function + jest.spyOn(repository as any, 'attachParentTweetQuery').mockRestore(); + }); + + it('should call nested get_interactions when user_id is provided', () => { + const query = MOCK_QUERY_BUILDER as any; + const user_id = 'user-123'; + + const result = (repository as any).attachParentTweetQuery(query, user_id); + + expect(result).toBe(query); + expect(query.addSelect).toHaveBeenCalled(); + expect(query.setParameter).toHaveBeenCalledWith('current_user_id', user_id); + }); + + it('should not set parameter when user_id is not provided', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachParentTweetQuery(query); + + expect(result).toBe(query); + expect(query.addSelect).toHaveBeenCalled(); + expect(query.setParameter).not.toHaveBeenCalled(); + }); + + it('should work with different table aliases', () => { + const query = MOCK_QUERY_BUILDER as any; + const user_id = 'user-123'; + const table_alias = 'custom_table'; + + const result = (repository as any).attachParentTweetQuery(query, user_id, table_alias); + + expect(result).toBe(query); + expect(query.addSelect).toHaveBeenCalled(); + }); + }); + + describe('attachConversationTweetQuery with user_id (nested function coverage)', () => { + beforeEach(() => { + // Restore real implementation to test nested get_interactions function + jest.spyOn(repository as any, 'attachConversationTweetQuery').mockRestore(); + }); + + it('should call nested get_interactions when user_id is provided', () => { + const query = MOCK_QUERY_BUILDER as any; + const user_id = 'user-123'; + + const result = (repository as any).attachConversationTweetQuery(query, user_id); + + expect(result).toBe(query); + expect(query.addSelect).toHaveBeenCalled(); + expect(query.setParameter).toHaveBeenCalledWith('current_user_id', user_id); + }); + + it('should not set parameter when user_id is not provided', () => { + const query = MOCK_QUERY_BUILDER as any; + + const result = (repository as any).attachConversationTweetQuery(query); + + expect(result).toBe(query); + expect(query.addSelect).toHaveBeenCalled(); + expect(query.setParameter).not.toHaveBeenCalled(); + }); + + it('should work with different table aliases', () => { + const query = MOCK_QUERY_BUILDER as any; + const user_id = 'user-123'; + const table_alias = 'custom_table'; + + const result = (repository as any).attachConversationTweetQuery( + query, + user_id, + table_alias + ); + + expect(result).toBe(query); + expect(query.addSelect).toHaveBeenCalled(); + }); + }); + + describe('attachUserInteractionBooleanFlags with real implementation', () => { + beforeEach(() => { + // Restore real implementation + jest.spyOn(repository as any, 'attachUserInteractionBooleanFlags').mockRestore(); + }); + + it('should add all interaction selects when user_id is provided', () => { + const query = MOCK_QUERY_BUILDER as any; + const user_id = 'user-123'; + + const result = (repository as any).attachUserInteractionBooleanFlags(query, user_id); + + expect(result).toBe(query); + // Should call addSelect 5 times (is_liked, is_reposted, is_bookmarked, is_following, is_follower) + expect(query.addSelect).toHaveBeenCalled(); + expect(query.setParameter).toHaveBeenCalledWith('current_user_id', user_id); + }); + + it('should not add selects when user_id is not provided', () => { + const query = MOCK_QUERY_BUILDER as any; + jest.clearAllMocks(); + + const result = (repository as any).attachUserInteractionBooleanFlags(query); + + expect(result).toBe(query); + expect(query.addSelect).not.toHaveBeenCalled(); + expect(query.setParameter).not.toHaveBeenCalled(); + }); + }); }); diff --git a/src/tweets/tweets.repository.ts b/src/tweets/tweets.repository.ts index 28ec8f3e..0cdfcb61 100644 --- a/src/tweets/tweets.repository.ts +++ b/src/tweets/tweets.repository.ts @@ -3,21 +3,19 @@ import { Tweet, TweetLike, TweetReply, TweetRepost } from './entities'; import { TweetBookmark } from './entities/tweet-bookmark.entity'; import { InjectRepository } from '@nestjs/typeorm'; import { Injectable } from '@nestjs/common'; -import { TimelineResponseDto } from 'src/timeline/dto/timeline-response.dto'; -import { TimelinePaginationDto } from 'src/timeline/dto/timeline-pagination.dto'; import { TweetResponseDTO } from './dto'; -import { TweetType } from 'src/shared/enums/tweet-types.enum'; import { PaginationService } from 'src/shared/services/pagination/pagination.service'; import { plainToInstance } from 'class-transformer'; -import { User, UserFollows } from 'src/user/entities'; +import { UserFollows } from 'src/user/entities'; import { getReplyWithParentChainQuery } from './queries/reply-parent-chain.query'; -import { getPostsByUserIdQuery } from './queries/get-posts-by-userId.query'; import { SelectQueryBuilder } from 'typeorm/browser'; import { UserPostsView } from './entities/user-posts-view.entity'; -import { getFollowingTweetsQuery } from './queries/get-following-tweets.query'; -import { getForyouTweetsQuery } from './queries/get-foryou-tweets.query'; import { TweetCategory } from './entities/tweet-category.entity'; import { tweet_fields_slect } from './queries/tweet-fields-select.query'; +import { + getPostsByUserIdProfileQuery, + getPostsByUserIdProfileQueryWithoutView, +} from './queries/get-posts-profile-view.query'; @Injectable() export class TweetsRepository extends Repository { @@ -32,13 +30,27 @@ export class TweetsRepository extends Repository { @InjectRepository(TweetCategory) private readonly tweet_category_repository: Repository, private readonly paginate_service: PaginationService, - private data_source: DataSource, + private readonly data_source: DataSource, @InjectRepository(UserPostsView) - private user_posts_view_repository: Repository + private readonly user_posts_view_repository: Repository ) { super(Tweet, data_source.createEntityManager()); } + private async incrementTweetViewsAsync(tweet_ids: string[]): Promise { + if (!tweet_ids.length) return; + + try { + // Call PostgreSQL function to increment views in batch + await this.data_source.query('SELECT increment_tweet_views_batch($1::uuid[])', [ + tweet_ids, + ]); + } catch (error) { + // Log error but don't fail the request + console.error('Failed to increment tweet views:', error); + } + } + async getTweetsByIds( tweet_ids: string[], current_user_id?: string @@ -56,6 +68,9 @@ export class TweetsRepository extends Repository { const tweets = await query.getMany(); + // Increment views asynchronously (don't await) + this.incrementTweetViewsAsync(tweet_ids).catch(() => {}); + return plainToInstance(TweetResponseDTO, tweets, { excludeExtraneousValues: true, }); @@ -81,7 +96,6 @@ export class TweetsRepository extends Repository { .where( new Brackets((qb) => qb - .where( 'tweet.profile_user_id IN (SELECT followed_id FROM user_follows WHERE follower_id = :user_id)', { user_id } @@ -96,8 +110,51 @@ export class TweetsRepository extends Repository { .andWhere( 'tweet.profile_user_id NOT IN (SELECT muted_id FROM user_mutes WHERE muter_id = :user_id)', { user_id } + ) + .andWhere( + 'tweet.tweet_author_id NOT IN (SELECT blocked_id FROM user_blocks WHERE blocker_id = :user_id)', + { user_id } + ) + .andWhere( + new Brackets((qb) => + qb + .where('tweet.conversation_user_id IS NULL') + .orWhere( + 'tweet.conversation_user_id NOT IN (SELECT muted_id FROM user_mutes WHERE muter_id = :user_id)', + { user_id } + ) + ) + ) + .andWhere( + new Brackets((qb) => + qb + .where('tweet.parent_user_id IS NULL') + .orWhere( + 'tweet.parent_user_id NOT IN (SELECT muted_id FROM user_mutes WHERE muter_id = :user_id)', + { user_id } + ) + ) + ) + .andWhere( + new Brackets((qb) => + qb + .where('tweet.conversation_user_id IS NULL') + .orWhere( + 'tweet.conversation_user_id NOT IN (SELECT blocked_id FROM user_blocks WHERE blocker_id = :user_id)', + { user_id } + ) + ) + ) + .andWhere( + new Brackets((qb) => + qb + .where('tweet.parent_user_id IS NULL') + .orWhere( + 'tweet.parent_user_id NOT IN (SELECT blocked_id FROM user_blocks WHERE blocker_id = :user_id)', + { user_id } + ) + ) ); - let query = this.user_posts_view_repository.manager .createQueryBuilder() .addCommonTableExpression(cte_query.getQuery(), 'filtered_tweets') @@ -140,6 +197,8 @@ export class TweetsRepository extends Repository { 'ranked.images AS images', 'ranked.videos AS videos', 'ranked.num_likes AS num_likes', + 'ranked.num_bookmarks AS num_bookmarks', + 'ranked.mentions AS mentions', 'ranked.num_reposts AS num_reposts', 'ranked.num_views AS num_views', 'ranked.num_quotes AS num_quotes', @@ -147,6 +206,7 @@ export class TweetsRepository extends Repository { 'ranked.created_at AS created_at', 'ranked.updated_at AS updated_at', 'ranked.reposted_by_name AS reposted_by_name', + 'ranked.reposted_by_username AS reposted_by_usernname', 'ranked.parent_id AS parent_id', 'ranked.conversation_id AS conversation_id', 'ranked.group_id AS group_id', @@ -192,6 +252,10 @@ export class TweetsRepository extends Repository { let tweets = await query.getRawMany(); + // Increment views for fetched tweets + const tweet_ids = tweets.map((t) => t.tweet_id).filter(Boolean); + this.incrementTweetViewsAsync(tweet_ids).catch(() => {}); + tweets = this.attachUserFollowFlags(tweets); const tweet_dtos = tweets.map((tweet) => @@ -199,120 +263,8 @@ export class TweetsRepository extends Repository { excludeExtraneousValues: true, }) ); - // Debugging - - // tweets.forEach((t, i) => { - // console.log(i, ': ', { - // tweet_id: tweets[i].tweet_id, - // conversation_id: tweets[i].debug_conversation_id, - // parent_id: tweets[i].debug_parent_id, - // group_id: tweets[i].group_id, - // repost_id: tweets[i].repost_id, - // rn: tweets[i].debug_rn, - // }); - // }); - const next_cursor = this.paginate_service.generateNextCursor(tweets, 'post_date', 'id'); - - return { - data: tweet_dtos, - pagination: { - next_cursor, - has_more: tweet_dtos.length === limit, - }, - }; - } catch (error) { - console.error(error); - throw error; - } - } - //TODO: This will be changed in next pushes, just template response for front - async getForyouTweets( - user_id: string, - cursor?: string, - limit: number = 20 - ): Promise<{ - data: TweetResponseDTO[]; - pagination: { next_cursor: string | null; has_more: boolean }; - }> { - try { - let query = this.user_posts_view_repository - .createQueryBuilder('tweet') - .select([ - 'tweet.tweet_id AS tweet_id', - 'tweet.profile_user_id AS profile_user_id', - 'tweet.tweet_author_id AS tweet_author_id', - 'tweet.repost_id AS repost_id', - 'tweet.post_type AS post_type', - 'tweet.type AS type', - 'tweet.content AS content', - 'tweet.images AS images', - 'tweet.videos AS videos', - 'tweet.post_date AS post_date', - 'tweet.num_likes AS num_likes', - 'tweet.num_reposts AS num_reposts', - 'tweet.num_views AS num_views', - 'tweet.num_quotes AS num_quotes', - 'tweet.num_replies AS num_replies', - 'tweet.created_at AS created_at', - 'tweet.updated_at AS updated_at', - - `json_build_object( - 'id', tweet.tweet_author_id, - 'username', tweet.username, - 'name', tweet.name, - 'avatar_url', tweet.avatar_url, - 'cover_url', tweet.cover_url, - 'verified', tweet.verified, - 'bio', tweet.bio, - 'followers', tweet.followers, - 'following', tweet.following - ) AS user`, - ]) - .where(`tweet.type='tweet'`) - - // EXCLUDE MUTED USERS - .andWhere( - 'tweet.profile_user_id NOT IN (SELECT muted_id FROM user_mutes WHERE muter_id = :user_id)', - { user_id } - ) - - // FAST RANDOM ORDERING - .orderBy('RANDOM()') - - .limit(limit); - - // Reuse same attach methods - // query = this.attachQuotedTweetQuery(query); - query = this.attachUserInteractionBooleanFlags( - query, - user_id, - 'tweet.tweet_author_id', - 'tweet.tweet_id' - ); - query = this.attachRepostInfo(query); - // query = this.attachRepliedTweetQuery(query); - - query = this.paginate_service.applyCursorPagination( - query, - cursor, - 'tweet', - 'post_date', - 'tweet_id' - ); - - const tweets = await query.getRawMany(); - - const tweet_dtos = tweets.map((t) => - plainToInstance(TweetResponseDTO, t, { - excludeExtraneousValues: true, - }) - ); - const next_cursor = this.paginate_service.generateNextCursor( - tweets, - 'post_date', - 'tweet_id' - ); + const next_cursor = this.paginate_service.generateNextCursor(tweets, 'post_date', 'id'); return { data: tweet_dtos, @@ -327,285 +279,7 @@ export class TweetsRepository extends Repository { } } - //just for now, till we make refactoring for tweets mapper - private mapRawTweetsToRepliesDTOs(raw_results: any[]): TweetResponseDTO[] { - return raw_results.map((row) => { - const tweet: TweetResponseDTO = { - tweet_id: row.tweet_tweet_id, - type: row.tweet_type as TweetType, - content: row.tweet_content, - // conversation_id: row.conversation_root_id, - images: row.tweet_images || [], - videos: row.tweet_videos || [], - user: { - id: row.user_id, - name: row.user_name, - username: row.user_username, - avatar_url: row.user_avatar_url, - verified: row.user_verified, - bio: row.user_bio, - cover_url: row.user_cover_url, - followers: row.user_followers, - following: row.user_following, - }, - likes_count: row.tweet_num_likes, - reposts_count: row.tweet_num_reposts, - quotes_count: row.tweet_num_quotes, - replies_count: row.tweet_num_replies, - views_count: row.tweet_num_views, - bookmarks_count: row.tweet_num_bookmarks || 0, - is_liked: row.is_liked === true, - is_reposted: row.is_reposted === true, - is_bookmarked: row.is_bookmarked === true, - created_at: row.tweet_created_at, - updated_at: row.tweet_updated_at, - }; - - // For replies endpoint, we only include parent_tweet_id but skip parent_tweet object - // This keeps the response clean and allows client to fetch parent details separately if needed - if (row.parent_tweet_id) { - tweet.parent_tweet_id = row.parent_tweet_id; - // Intentionally skipping parent_tweet object to keep replies response simple - } - - // reposted_by info if this is a repost (though unlikely for replies) - if (row.repost_id && row.repost_user_id) { - tweet.reposted_by = { - repost_id: row.repost_id, - id: row.repost_user_id, - name: row.repost_user_name, - reposted_at: row.repost_created_at, - }; - } - - // Attach first nested reply from original tweet owner (if exists) - if (row.nested_reply && typeof row.nested_reply === 'object') { - tweet.replies = [ - { - tweet_id: row.nested_reply.tweet_id, - type: row.nested_reply.type as TweetType, - content: row.nested_reply.content, - images: row.nested_reply.images || [], - videos: row.nested_reply.videos || [], - parent_tweet_id: row.nested_reply.parent_tweet_id, - user: row.nested_reply.user, - likes_count: row.nested_reply.likes_count, - reposts_count: row.nested_reply.reposts_count, - quotes_count: row.nested_reply.quotes_count, - replies_count: row.nested_reply.replies_count, - views_count: row.nested_reply.views_count, - bookmarks_count: row.nested_reply.bookmarks_count || 0, - is_liked: row.nested_reply.nested_is_liked === true, - is_reposted: row.nested_reply.nested_is_reposted === true, - is_bookmarked: row.nested_reply.nested_is_bookmarked === true, - created_at: row.nested_reply.created_at, - updated_at: row.nested_reply.updated_at, - }, - ]; - } - - return tweet; - }); - } - - async getReplies( - tweet_id: string, - user_id: string | undefined, - pagination: TimelinePaginationDto - ): Promise<{ tweets: TweetResponseDTO[]; next_cursor: string | null }> { - // First get the original tweet owner - const original_tweet = await this.tweet_repository.findOne({ - where: { tweet_id }, - select: ['user_id'], - }); - - if (!original_tweet) { - return { tweets: [], next_cursor: null }; - } - - const original_tweet_owner_id = original_tweet.user_id; - - // Build the nested replies subquery (second-level replies from owner) - const get_user_interactions = (prefix: string) => { - if (!user_id) { - return ` - '${prefix}_is_liked', FALSE, - '${prefix}_is_reposted', FALSE, - '${prefix}_is_bookmarked', FALSE, - `; - } - return ` - '${prefix}_is_liked', EXISTS( - SELECT 1 FROM tweet_likes - WHERE tweet_likes.tweet_id = ${prefix}_tweet.tweet_id - AND tweet_likes.user_id = :user_id - ), - '${prefix}_is_reposted', EXISTS( - SELECT 1 FROM tweet_reposts - WHERE tweet_reposts.tweet_id = ${prefix}_tweet.tweet_id - AND tweet_reposts.user_id = :user_id - ), - '${prefix}_is_bookmarked', EXISTS( - SELECT 1 FROM tweet_bookmarks - WHERE tweet_bookmarks.tweet_id = ${prefix}_tweet.tweet_id - AND tweet_bookmarks.user_id = :user_id - ), - `; - }; - - // Note: I will skip parent object data for replies to keep response clean as the front will have that info already - const query_builder = this.tweet_repository - .createQueryBuilder('tweet') - .leftJoinAndSelect('tweet.user', 'user') - .leftJoin('tweet_replies', 'reply', 'reply.reply_tweet_id = tweet.tweet_id') - .addSelect( - `CASE - WHEN reply.reply_tweet_id IS NOT NULL THEN 'reply' - ELSE 'tweet' - END`, - 'tweet_type' - ) - .addSelect('reply.original_tweet_id', 'parent_tweet_id') - .addSelect( - `( - WITH RECURSIVE conversation_tree AS ( - SELECT - reply.reply_tweet_id, - reply.original_tweet_id, - reply.original_tweet_id as root_id, - 1 as depth - FROM tweet_replies reply - WHERE reply.reply_tweet_id = tweet.tweet_id - - UNION ALL - - SELECT - ct.reply_tweet_id, - tr.original_tweet_id, - tr.original_tweet_id, - ct.depth + 1 - FROM conversation_tree ct - INNER JOIN tweet_replies tr ON ct.root_id = tr.reply_tweet_id - WHERE ct.depth < 100 - ) - SELECT root_id - FROM conversation_tree - ORDER BY depth DESC - LIMIT 1 - )`, - 'conversation_root_id' - ) - // Add first nested reply from owner (if exists) - .addSelect( - `( - SELECT json_build_object( - 'tweet_id', nested_tweet.tweet_id, - 'type', 'reply', - 'content', nested_tweet.content, - 'images', nested_tweet.images, - 'videos', nested_tweet.videos, - 'parent_tweet_id', nested_reply.original_tweet_id, - 'user', json_build_object( - 'id', nested_user.id, - 'name', nested_user.name, - 'username', nested_user.username, - 'avatar_url', nested_user.avatar_url, - 'verified', nested_user.verified, - 'bio', nested_user.bio, - 'cover_url', nested_user.cover_url, - 'followers', nested_user.followers, - 'following', nested_user.following - ), - 'likes_count', nested_tweet.num_likes, - 'reposts_count', nested_tweet.num_reposts, - 'quotes_count', nested_tweet.num_quotes, - 'replies_count', nested_tweet.num_replies, - 'views_count', nested_tweet.num_views, - 'bookmarks_count', nested_tweet.num_bookmarks, - ${get_user_interactions('nested')} - 'created_at', nested_tweet.created_at, - 'updated_at', nested_tweet.updated_at - ) - FROM tweet_replies nested_reply - INNER JOIN tweets nested_tweet ON nested_reply.reply_tweet_id = nested_tweet.tweet_id - INNER JOIN "user" nested_user ON nested_tweet.user_id = nested_user.id - WHERE nested_reply.original_tweet_id = tweet.tweet_id - AND nested_tweet.user_id = :original_tweet_owner_id - ORDER BY nested_tweet.created_at ASC - LIMIT 1 - )`, - 'nested_reply' - ) - .where('reply.original_tweet_id = :tweet_id') - .setParameter('tweet_id', tweet_id) - .setParameter('original_tweet_owner_id', original_tweet_owner_id) - .orderBy('tweet.created_at', 'DESC') - .limit(pagination.limit); - - // Add user-specific queries only if user is authenticated - if (user_id) { - query_builder - .addSelect( - `EXISTS( - SELECT 1 FROM tweet_likes - WHERE tweet_likes.tweet_id = tweet.tweet_id - AND tweet_likes.user_id = :user_id - )`, - 'is_liked' - ) - .addSelect( - `EXISTS( - SELECT 1 FROM tweet_reposts - WHERE tweet_reposts.tweet_id = tweet.tweet_id - AND tweet_reposts.user_id = :user_id - )`, - 'is_reposted' - ) - .addSelect( - `EXISTS( - SELECT 1 FROM tweet_bookmarks - WHERE tweet_bookmarks.tweet_id = tweet.tweet_id - AND tweet_bookmarks.user_id = :user_id - )`, - 'is_bookmarked' - ) - .andWhere( - `tweet.user_id NOT IN( - SELECT muted_id - FROM user_mutes - WHERE muter_id = :user_id - )` - ) - .setParameter('user_id', user_id); - } else { - query_builder - .addSelect('FALSE', 'is_liked') - .addSelect('FALSE', 'is_reposted') - .addSelect('FALSE', 'is_bookmarked'); - } - - if (pagination.cursor) { - const [cursor_timestamp, cursor_id] = pagination.cursor.split('_'); - if (cursor_timestamp && cursor_id) { - query_builder.andWhere( - '(tweet.created_at < :cursor_timestamp OR (tweet.created_at = :cursor_timestamp AND tweet.tweet_id < :cursor_id))', - { cursor_timestamp, cursor_id } - ); - } - } - - const raw_results = await query_builder.getRawMany(); - const tweets = this.mapRawTweetsToRepliesDTOs(raw_results); - - const next_cursor = - tweets.length > 0 && tweets.length === pagination.limit - ? `${tweets[tweets.length - 1].created_at.toISOString()}_${tweets[tweets.length - 1].tweet_id}` - : null; - - return { tweets, next_cursor }; - } - - /**************************** Alyaa ****************************/ + /**************************** User Tabs ****************************/ async getPostsByUserId( user_id: string, current_user_id?: string, @@ -619,47 +293,20 @@ export class TweetsRepository extends Repository { }; }> { try { - let query = this.user_posts_view_repository - .createQueryBuilder('tweet') - .select([ - 'tweet.tweet_id AS tweet_id', - 'tweet.profile_user_id AS profile_user_id', - 'tweet.tweet_author_id AS tweet_author_id', - 'tweet.repost_id AS repost_id', - 'tweet.post_type AS post_type', - 'tweet.type AS type', - 'tweet.content AS content', - 'tweet.type AS type', - 'tweet.post_date AS post_date', - 'tweet.images AS images', - 'tweet.videos AS videos', - 'tweet.num_likes AS num_likes', - 'tweet.num_reposts AS num_reposts', - 'tweet.num_views AS num_views', - 'tweet.num_quotes AS num_quotes', - 'tweet.num_replies AS num_replies', - 'tweet.created_at AS created_at', - 'tweet.post_date AS post_date', - 'tweet.updated_at AS updated_at', - `json_build_object( - 'id', tweet.tweet_author_id, - 'username', tweet.username, - 'name', tweet.name, - 'avatar_url', tweet.avatar_url, - 'cover_url', tweet.cover_url, - 'verified', tweet.verified, - 'bio', tweet.bio, - 'followers', tweet.followers, - 'following', tweet.following - ) AS user`, - ]) - .where('tweet.profile_user_id = :user_id', { user_id }) + let query = this.user_posts_view_repository.createQueryBuilder('tweet'); + + query = getPostsByUserIdProfileQuery(query, user_id); + + query = query .andWhere('tweet.type != :type', { type: 'reply' }) .orderBy('tweet.post_date', 'DESC') .addOrderBy('tweet.tweet_id', 'DESC') .limit(limit); - query = this.attachQuotedTweetQuery(query); + query = this.attachParentTweetQuery(query, current_user_id, 'tweet'); + query = this.attachConversationTweetQuery(query, current_user_id, 'tweet'); + + query = this.attachRepostInfo(query, 'tweet'); query = this.attachUserInteractionBooleanFlags( query, @@ -677,6 +324,11 @@ export class TweetsRepository extends Repository { ); let tweets = await query.getRawMany(); + + // Increment views for fetched posts + const tweet_ids = tweets.map((t) => t.tweet_id).filter(Boolean); + this.incrementTweetViewsAsync(tweet_ids).catch(() => {}); + tweets = this.attachUserFollowFlags(tweets); const tweet_dtos = tweets.map((reply) => @@ -717,40 +369,11 @@ export class TweetsRepository extends Repository { }; }> { try { - let query = this.user_posts_view_repository - .createQueryBuilder('tweet') - .select([ - 'tweet.tweet_id AS tweet_id', - 'tweet.profile_user_id AS profile_user_id', - 'tweet.tweet_author_id AS tweet_author_id', - 'tweet.repost_id AS repost_id', - 'tweet.post_type AS post_type', - 'tweet.type AS type', - 'tweet.content AS content', - 'tweet.type AS type', - 'tweet.post_date AS post_date', - 'tweet.images AS images', - 'tweet.videos AS videos', - 'tweet.num_likes AS num_likes', - 'tweet.num_reposts AS num_reposts', - 'tweet.num_views AS num_views', - 'tweet.num_quotes AS num_quotes', - 'tweet.num_replies AS num_replies', - 'tweet.created_at AS created_at', - 'tweet.updated_at AS updated_at', - `json_build_object( - 'id', tweet.tweet_author_id, - 'username', tweet.username, - 'name', tweet.name, - 'avatar_url', tweet.avatar_url, - 'cover_url', tweet.cover_url, - 'verified', tweet.verified, - 'bio', tweet.bio, - 'followers', tweet.followers, - 'following', tweet.following - ) AS user`, - ]) - .where('tweet.profile_user_id = :user_id', { user_id }) + let query = this.user_posts_view_repository.createQueryBuilder('tweet'); + + query = getPostsByUserIdProfileQuery(query, user_id); + + query = query .andWhere('tweet.type = :type', { type: 'reply' }) .orderBy('tweet.post_date', 'DESC') .addOrderBy('tweet.tweet_id', 'DESC') @@ -763,7 +386,8 @@ export class TweetsRepository extends Repository { 'tweet.tweet_id' ); - query = this.attachRepliedTweetQuery(query, current_user_id); + query = this.attachParentTweetQuery(query, current_user_id, 'tweet'); + query = this.attachConversationTweetQuery(query, current_user_id, 'tweet'); query = this.paginate_service.applyCursorPagination( query, @@ -774,6 +398,11 @@ export class TweetsRepository extends Repository { ); let tweets = await query.getRawMany(); + + // Increment views for fetched replies + const tweet_ids = tweets.map((t) => t.tweet_id).filter(Boolean); + this.incrementTweetViewsAsync(tweet_ids).catch(() => {}); + tweets = this.attachUserFollowFlags(tweets); const tweet_dtos = tweets.map((reply) => @@ -814,40 +443,11 @@ export class TweetsRepository extends Repository { }; }> { try { - let query = this.user_posts_view_repository - .createQueryBuilder('tweet') - .select([ - 'tweet.tweet_id AS tweet_id', - 'tweet.profile_user_id AS profile_user_id', - 'tweet.tweet_author_id AS tweet_author_id', - 'tweet.repost_id AS repost_id', - 'tweet.post_type AS post_type', - 'tweet.type AS type', - 'tweet.content AS content', - 'tweet.type AS type', - 'tweet.post_date AS post_date', - 'tweet.images AS images', - 'tweet.videos AS videos', - 'tweet.num_likes AS num_likes', - 'tweet.num_reposts AS num_reposts', - 'tweet.num_views AS num_views', - 'tweet.num_quotes AS num_quotes', - 'tweet.num_replies AS num_replies', - 'tweet.created_at AS created_at', - 'tweet.updated_at AS updated_at', - `json_build_object( - 'id', tweet.tweet_author_id, - 'username', tweet.username, - 'name', tweet.name, - 'avatar_url', tweet.avatar_url, - 'cover_url', tweet.cover_url, - 'verified', tweet.verified, - 'bio', tweet.bio, - 'followers', tweet.followers, - 'following', tweet.following - ) AS user`, - ]) - .where('tweet.profile_user_id = :user_id', { user_id }) + let query = this.user_posts_view_repository.createQueryBuilder('tweet'); + + query = getPostsByUserIdProfileQuery(query, user_id); + + query = query .andWhere( '(array_length(tweet.images, 1) > 0 OR array_length(tweet.videos, 1) > 0)' ) @@ -863,7 +463,8 @@ export class TweetsRepository extends Repository { 'tweet.tweet_id' ); - query = this.attachRepliedTweetQuery(query, current_user_id); + query = this.attachParentTweetQuery(query, current_user_id, 'tweet'); + query = this.attachConversationTweetQuery(query, current_user_id, 'tweet'); query = this.paginate_service.applyCursorPagination( query, @@ -874,6 +475,11 @@ export class TweetsRepository extends Repository { ); let tweets = await query.getRawMany(); + + // Increment views for fetched media + const tweet_ids = tweets.map((t) => t.tweet_id).filter(Boolean); + this.incrementTweetViewsAsync(tweet_ids).catch(() => {}); + tweets = this.attachUserFollowFlags(tweets); const tweet_dtos = tweets.map((reply) => @@ -920,45 +526,18 @@ export class TweetsRepository extends Repository { 'like', 'like.tweet_id = tweet.tweet_id AND like.user_id = :user_id', { user_id } - ) - .select([ - 'tweet.tweet_id AS tweet_id', - 'tweet.profile_user_id AS profile_user_id', - 'tweet.tweet_author_id AS tweet_author_id', - 'tweet.repost_id AS repost_id', - 'tweet.post_type AS post_type', - 'tweet.type AS type', - 'tweet.content AS content', - 'tweet.type AS type', - 'tweet.post_date AS post_date', - 'tweet.images AS images', - 'tweet.videos AS videos', - 'tweet.num_likes AS num_likes', - 'tweet.num_reposts AS num_reposts', - 'tweet.num_views AS num_views', - 'tweet.num_quotes AS num_quotes', - 'tweet.num_replies AS num_replies', - 'tweet.created_at AS created_at', - 'tweet.updated_at AS updated_at', - 'like.created_at AS liked_at', - `json_build_object( - 'id', tweet.tweet_author_id, - 'username', tweet.username, - 'name', tweet.name, - 'avatar_url', tweet.avatar_url, - 'cover_url', tweet.cover_url, - 'verified', tweet.verified, - 'bio', tweet.bio, - 'followers', tweet.followers, - 'following', tweet.following - ) AS user`, - ]) + ); + + query = getPostsByUserIdProfileQueryWithoutView(query, user_id); + + query = query .where('tweet.type != :type', { type: 'repost' }) .orderBy('like.created_at', 'DESC') .addOrderBy('tweet.tweet_id', 'DESC') .limit(limit); - query = this.attachQuotedTweetQuery(query); + query = this.attachParentTweetQuery(query, user_id, 'tweet'); + query = this.attachConversationTweetQuery(query, user_id, 'tweet'); query = this.attachUserInteractionBooleanFlags( query, @@ -967,8 +546,6 @@ export class TweetsRepository extends Repository { 'tweet.tweet_id' ); - query = this.attachRepliedTweetQuery(query, user_id); - query = this.paginate_service.applyCursorPagination( query, cursor, @@ -978,6 +555,11 @@ export class TweetsRepository extends Repository { ); let tweets = await query.getRawMany(); + + // Increment views for liked posts + const tweet_ids = tweets.map((t) => t.tweet_id).filter(Boolean); + this.incrementTweetViewsAsync(tweet_ids).catch(() => {}); + tweets = this.attachUserFollowFlags(tweets); const tweet_dtos = tweets.map((reply) => @@ -1004,84 +586,8 @@ export class TweetsRepository extends Repository { throw error; } } - attachQuotedTweetQuery(query: SelectQueryBuilder): SelectQueryBuilder { - // query - // .leftJoin( - // 'tweet_quotes', - // 'quote_rel', - // `quote_rel.quote_tweet_id = tweet.tweet_id AND tweet.type = 'quote'` - // ) - // .leftJoin( - // 'user_posts_view', - // 'quoted_tweet', - // 'quoted_tweet.tweet_id = quote_rel.original_tweet_id' - // ) - // .addSelect( - // `CASE - // WHEN tweet.type = 'quote' AND quoted_tweet.tweet_id IS NOT NULL THEN - // json_build_object( - // 'tweet_id', quoted_tweet.tweet_id, - // 'content', quoted_tweet.content, - // 'created_at', quoted_tweet.post_date, - // 'type', quoted_tweet.type, - // 'images', quoted_tweet.images, - // 'videos', quoted_tweet.videos, - - // 'user', json_build_object( - // 'id', quoted_tweet.tweet_author_id, - // 'username', quoted_tweet.username, - // 'name', quoted_tweet.name, - // 'avatar_url', quoted_tweet.avatar_url, - // 'verified', quoted_tweet.verified, - // 'bio', quoted_tweet.bio, - // 'cover_url', quoted_tweet.cover_url, - // 'followers', quoted_tweet.followers, - // 'following', quoted_tweet.following - // ) - // ) - // ELSE NULL - // END`, - // 'parent_tweet' - // ); - // return query; - query.addSelect( - ` - ( - SELECT json_build_object( - 'tweet_id', quoted_tweet.tweet_id, - 'content', quoted_tweet.content, - 'created_at', quoted_tweet.post_date, - 'type', quoted_tweet.type, - 'images', quoted_tweet.images, - 'videos', quoted_tweet.videos, - 'num_likes', quoted_tweet.num_likes, - 'num_reposts', quoted_tweet.num_reposts, - 'num_views', quoted_tweet.num_views, - 'num_replies', quoted_tweet.num_replies, - 'num_quotes', quoted_tweet.num_quotes, - 'user', json_build_object( - 'id', quoted_tweet.tweet_author_id, - 'username', quoted_tweet.username, - 'name', quoted_tweet.name, - 'avatar_url', quoted_tweet.avatar_url, - 'verified', quoted_tweet.verified, - 'bio', quoted_tweet.bio, - 'cover_url', quoted_tweet.cover_url, - 'followers', quoted_tweet.followers, - 'following', quoted_tweet.following - ) - ) - FROM tweet_quotes quote_rel - JOIN user_posts_view quoted_tweet - ON quoted_tweet.tweet_id = quote_rel.original_tweet_id - WHERE quote_rel.quote_tweet_id = tweet.tweet_id - LIMIT 1 - ) AS parent_tweet - ` - ); - return query; - } + /**************************** Attaches ****************************/ attachRepostInfo( query: SelectQueryBuilder, @@ -1092,126 +598,16 @@ export class TweetsRepository extends Repository { 'repost_id', ${table_alias}.repost_id, 'id', ${table_alias}.profile_user_id, 'name', ${table_alias}.reposted_by_name, + 'username', ${table_alias}.reposted_by_username, 'reposted_at', ${table_alias}.post_date ) ELSE NULL END AS reposted_by`); return query; } - attachRepliedTweetQuery( - query: SelectQueryBuilder, - user_id?: string - ): SelectQueryBuilder { - const get_interactions = (alias: string) => { - if (!user_id) return ''; - - return ` - 'is_liked', EXISTS( - SELECT 1 FROM tweet_likes - WHERE tweet_likes.tweet_id = ${alias}.tweet_id - AND tweet_likes.user_id = :current_user_id - ), - 'is_reposted', EXISTS( - SELECT 1 FROM tweet_reposts - WHERE tweet_reposts.tweet_id = ${alias}.tweet_id - AND tweet_reposts.user_id = :current_user_id - ), - 'is_following', EXISTS( - SELECT 1 FROM user_follows - WHERE user_follows.follower_id = :current_user_id - AND user_follows.followed_id = ${alias}.tweet_author_id - ), - 'is_follower', EXISTS( - SELECT 1 FROM user_follows - WHERE user_follows.follower_id = ${alias}.tweet_author_id - AND user_follows.followed_id = :current_user_id - ),`; - }; - - const parent_sub_query = this.data_source - .createQueryBuilder() - .select( - ` - json_build_object( - 'tweet_id', p.tweet_id, - 'content', p.content, - 'created_at', p.post_date, - 'type', p.type, - 'images', p.images, - 'videos', p.videos, - 'num_likes', p.num_likes, - 'num_reposts', p.num_reposts, - 'num_views', p.num_views, - 'num_replies', p.num_replies, - 'num_quotes', p.num_quotes, - ${get_interactions('p')} - 'user', json_build_object( - 'id', p.tweet_author_id, - 'username', p.username, - 'name', p.name, - 'avatar_url', p.avatar_url, - 'verified', p.verified, - 'bio', p.bio, - 'cover_url', p.cover_url, - 'followers', p.followers, - 'following', p.following - ) - ) - ` - ) - .from('tweet_replies', 'tr') - .leftJoin('user_posts_view', 'p', 'p.tweet_id = tr.original_tweet_id') - .where('tr.reply_tweet_id = tweet.tweet_id') - .limit(1); - - const conversation_sub_query = this.data_source - .createQueryBuilder() - .select( - ` - json_build_object( - 'tweet_id', c.tweet_id, - 'content', c.content, - 'created_at', c.post_date, - 'type', c.type, - 'images', c.images, - 'videos', c.videos, - 'num_likes', c.num_likes, - 'num_reposts', c.num_reposts, - 'num_views', c.num_views, - 'num_replies', c.num_replies, - 'num_quotes', c.num_quotes, - ${get_interactions('c')} - 'user', json_build_object( - 'id', c.tweet_author_id, - 'username', c.username, - 'name', c.name, - 'avatar_url', c.avatar_url, - 'verified', c.verified, - 'bio', c.bio, - 'cover_url', c.cover_url, - 'followers', c.followers, - 'following', c.following - ) - ) - ` - ) - .from('tweet_replies', 'tr2') - .leftJoin('user_posts_view', 'c', 'c.tweet_id = tr2.conversation_id') - .where('tr2.reply_tweet_id = tweet.tweet_id') - .limit(1); - - query - .addSelect(`(${parent_sub_query.getQuery()})`, 'parent_tweet') - .addSelect(`(${conversation_sub_query.getQuery()})`, 'conversation_tweet'); - - if (user_id) { - query.setParameter('current_user_id', user_id); - } - - return query; - } attachParentTweetQuery( query: SelectQueryBuilder, - user_id?: string + user_id?: string, + table_alias: string = 'ranked' ): SelectQueryBuilder { const get_interactions = (alias: string) => { if (!user_id) return ''; @@ -1227,6 +623,11 @@ export class TweetsRepository extends Repository { WHERE tweet_reposts.tweet_id = ${alias}.tweet_id AND tweet_reposts.user_id = :current_user_id ), + 'is_bookmarked', EXISTS( + SELECT 1 FROM tweet_bookmarks + WHERE tweet_bookmarks.tweet_id = ${alias}.tweet_id + AND tweet_bookmarks.user_id = :current_user_id + ), 'is_following', EXISTS( SELECT 1 FROM user_follows WHERE user_follows.follower_id = :current_user_id @@ -1243,7 +644,7 @@ export class TweetsRepository extends Repository { ` CASE -- For replies: get parent from tweet_replies - WHEN ranked.type = 'reply' or (ranked.type='repost' and ranked.post_type='reply')THEN ( + WHEN ${table_alias}.type = 'reply' or (${table_alias}.type='repost' and ${table_alias}.post_type='reply')THEN ( SELECT json_build_object( 'tweet_id', p.tweet_id, 'content', p.content, @@ -1256,6 +657,8 @@ export class TweetsRepository extends Repository { 'num_views', p.num_views, 'num_replies', p.num_replies, 'num_quotes', p.num_quotes, + 'num_bookmarks', p.num_bookmarks, + 'mentions', p.mentions, ${get_interactions('p')} -- Add nested quoted_tweet if conversation root is a quote 'parent_tweet', CASE @@ -1272,6 +675,8 @@ export class TweetsRepository extends Repository { 'num_views', pc.num_views, 'num_replies', pc.num_replies, 'num_quotes', pc.num_quotes, + 'num_bookmarks', pc.num_bookmarks, + 'mentions', pc.mentions, ${get_interactions('pc')} 'user', json_build_object( 'id', pc.tweet_author_id, @@ -1304,12 +709,12 @@ export class TweetsRepository extends Repository { ) ) FROM user_posts_view p - WHERE ranked.parent_id = p.tweet_id + WHERE ${table_alias}.parent_id = p.tweet_id LIMIT 1 ) -- For quotes: get parent from tweet_quotes - WHEN ranked.type = 'quote' or (ranked.type='repost' and ranked.post_type='quote' )THEN ( + WHEN ${table_alias}.type = 'quote' or (${table_alias}.type='repost' and ${table_alias}.post_type='quote' )THEN ( SELECT json_build_object( 'tweet_id', q.tweet_id, 'content', q.content, @@ -1322,6 +727,8 @@ export class TweetsRepository extends Repository { 'num_views', q.num_views, 'num_replies', q.num_replies, 'num_quotes', q.num_quotes, + 'num_bookmarks', q.num_bookmarks, + 'mentions', q.mentions, 'user', json_build_object( 'id', q.tweet_author_id, 'username', q.username, @@ -1335,7 +742,7 @@ export class TweetsRepository extends Repository { ) ) FROM user_posts_view q - WHERE ranked.parent_id = q.tweet_id + WHERE ${table_alias}.parent_id = q.tweet_id LIMIT 1 ) @@ -1353,7 +760,8 @@ export class TweetsRepository extends Repository { attachConversationTweetQuery( query: SelectQueryBuilder, - user_id?: string + user_id?: string, + table_alias: string = 'ranked' ): SelectQueryBuilder { const get_interactions = (alias: string) => { if (!user_id) return ''; @@ -1369,6 +777,11 @@ export class TweetsRepository extends Repository { WHERE tweet_reposts.tweet_id = ${alias}.tweet_id AND tweet_reposts.user_id = :current_user_id ), + 'is_bookmarked', EXISTS( + SELECT 1 FROM tweet_bookmarks + WHERE tweet_bookmarks.tweet_id = ${alias}.tweet_id + AND tweet_bookmarks.user_id = :current_user_id + ), 'is_following', EXISTS( SELECT 1 FROM user_follows WHERE user_follows.follower_id = :current_user_id @@ -1384,7 +797,7 @@ export class TweetsRepository extends Repository { query.addSelect( ` CASE - WHEN ranked.conversation_id IS NOT NULL THEN ( + WHEN ${table_alias}.conversation_id IS NOT NULL THEN ( SELECT json_build_object( 'tweet_id', c.tweet_id, 'content', c.content, @@ -1397,6 +810,8 @@ export class TweetsRepository extends Repository { 'num_views', c.num_views, 'num_replies', c.num_replies, 'num_quotes', c.num_quotes, + 'num_bookmarks', c.num_bookmarks, + 'mentions', c.mentions, ${get_interactions('c')} -- Add nested quoted_tweet if conversation root is a quote 'parent_tweet', CASE @@ -1413,6 +828,8 @@ export class TweetsRepository extends Repository { 'num_views', qc.num_views, 'num_replies', qc.num_replies, 'num_quotes', qc.num_quotes, + 'num_bookmarks', qc.num_bookmarks, + 'mentions', qc.mentions, ${get_interactions('qc')} 'user', json_build_object( 'id', qc.tweet_author_id, @@ -1445,7 +862,7 @@ export class TweetsRepository extends Repository { ) ) FROM user_posts_view c - WHERE ranked.conversation_id = c.tweet_id + WHERE ${table_alias}.conversation_id = c.tweet_id LIMIT 1 ) ELSE NULL @@ -1459,6 +876,7 @@ export class TweetsRepository extends Repository { return query; } + attachUserInteractionBooleanFlags( query: SelectQueryBuilder, current_user_id?: string, @@ -1483,6 +901,14 @@ export class TweetsRepository extends Repository { )`, 'is_reposted' ) + .addSelect( + `EXISTS( + SELECT 1 FROM tweet_bookmarks + WHERE tweet_bookmarks.tweet_id = ${tweet_id_column} + AND tweet_bookmarks.user_id = :current_user_id + )`, + 'is_bookmarked' + ) .addSelect( `EXISTS( SELECT 1 FROM user_follows @@ -1543,7 +969,7 @@ export class TweetsRepository extends Repository { return query; } - /**************************** Alyaa ****************************/ + /**************************** Profile ****************************/ /** * Fetches a reply tweet along with its entire parent chain using a single recursive query. @@ -1573,86 +999,7 @@ export class TweetsRepository extends Repository { } } - async getRecentTweetsByCategoryIds( - category_ids: string[], - user_id: string, - options: { - limit?: number; - since_hours_ago?: number; - } = {} - ): Promise { - const limit = options.limit ?? 300; - const since_hours_ago = options.since_hours_ago ?? 48; - - const query = this.tweet_repository - .createQueryBuilder('tweet') - .leftJoinAndSelect('tweet.user', 'user') - .innerJoin('tweet_category', 'tc', 'tc.tweet_id = tweet.tweet_id') - .where('tc.category_id = ANY(:category_ids)', { category_ids }) - .andWhere('tweet.created_at > NOW() - INTERVAL :hours hours', { - hours: since_hours_ago, - }) - .andWhere('tweet.user_id != :user_id', { user_id }) - // .andWhere( - // `tweet.user_id NOT IN ( - // SELECT followed_id FROM user_follows WHERE follower_id = :user_id - // )` - // ) - .orderBy('tweet.created_at', 'DESC') - .addOrderBy('tweet.tweet_id', 'DESC') - .take(limit + 50); // extra buffer - - // Attach all interaction flags - const final_query = this.attachUserTweetInteractionFlags(query, user_id, 'tweet'); - - const tweets = await final_query.getMany(); - - return tweets.map((tweet) => - plainToInstance(TweetResponseDTO, tweet, { - excludeExtraneousValues: true, - }) - ); - } - - async getTweetsCategories( - tweet_ids: string[] - ): Promise> { - try { - const query = this.tweet_category_repository - .createQueryBuilder('tc') - .select('tc.tweet_id', 'tweet_id') - .addSelect('tc.category_id', 'category_id') - .addSelect('tc.percentage', 'percentage') - .where('tc.tweet_id IN (:...tweet_ids)', { tweet_ids }) - .orderBy('tc.tweet_id', 'DESC') - .addOrderBy('tc.percentage', 'DESC'); - - const categories = await query.getMany(); - return ( - categories.reduce((acc, entity) => { - const tweet_id = entity.tweet_id; - - if (!acc[tweet_id]) { - acc[tweet_id] = []; - } - - acc[tweet_id].push({ - category_id: entity.category_id, - percentage: entity.percentage, - }); - - return acc; - }), - {} as Record - ); - } catch (error) { - console.log(error); - throw error; - } - } - //TODO: Attach user likes - attachUserFollowFlags(tweets: any[]) { return tweets.map((t) => { if (t.user) { diff --git a/src/tweets/tweets.service.spec.ts b/src/tweets/tweets.service.spec.ts index 8111bddf..6d40b33d 100644 --- a/src/tweets/tweets.service.spec.ts +++ b/src/tweets/tweets.service.spec.ts @@ -4,6 +4,7 @@ import { TweetsService } from './tweets.service'; import { getRepositoryToken } from '@nestjs/typeorm'; import { DataSource, Repository } from 'typeorm'; import { Tweet } from './entities/tweet.entity'; +import { TweetType } from 'src/shared/enums/tweet-types.enum'; import { TweetLike } from './entities/tweet-like.entity'; import { TweetRepost } from './entities/tweet-repost.entity'; import { TweetQuote } from './entities/tweet-quote.entity'; @@ -11,6 +12,7 @@ import { TweetReply } from './entities/tweet-reply.entity'; import { TweetBookmark } from './entities/tweet-bookmark.entity'; import { TweetSummary } from './entities/tweet-summary.entity'; import { UserFollows } from '../user/entities/user-follows.entity'; +import { User } from '../user/entities/user.entity'; import { UserPostsView } from './entities/user-posts-view.entity'; import { CreateTweetDTO } from './dto/create-tweet.dto'; import { PaginationService } from '../shared/services/pagination/pagination.service'; @@ -50,6 +52,8 @@ describe('TweetsService', () => { let reply_job_service: any; let quote_job_service: any; let mention_job_service: any; + let hashtag_job_service: any; + let pagination_service: any; beforeAll(() => { original_env = { ...process.env }; @@ -98,6 +102,7 @@ describe('TweetsService', () => { attachUserTweetInteractionFlags: jest.fn(), getReplyWithParentChain: jest.fn(), getReplies: jest.fn(), + getTweetsByIds: jest.fn(), }; const mock_azure_storage_service = { @@ -175,11 +180,17 @@ describe('TweetsService', () => { findOne: jest.fn(), merge: jest.fn(), upsert: jest.fn(), + find: jest.fn(), }, }; const mock_data_source = { createQueryRunner: jest.fn(() => mock_query_runner), + query: jest.fn(), + }; + + const mock_user_repo = { + find: jest.fn().mockResolvedValue([]), }; const module: TestingModule = await Test.createTestingModule({ @@ -191,6 +202,7 @@ describe('TweetsService', () => { { provide: getRepositoryToken(TweetQuote), useValue: mock_tweet_quote_repo }, { provide: getRepositoryToken(TweetReply), useValue: mock_tweet_reply_repo }, { provide: getRepositoryToken(TweetBookmark), useValue: mock_tweet_bookmark_repo }, + { provide: getRepositoryToken(User), useValue: mock_user_repo }, { provide: getRepositoryToken(TweetSummary), useValue: mock_tweet_summary_repo }, { provide: getRepositoryToken(UserFollows), useValue: mock_user_follows_repo }, { provide: getRepositoryToken(UserPostsView), useValue: mock_user_posts_view_repo }, @@ -222,6 +234,8 @@ describe('TweetsService', () => { reply_job_service = mock_reply_job_service; quote_job_service = mock_quote_job_service; mention_job_service = mock_mention_job_service; + hashtag_job_service = mock_hashtag_job_service; + pagination_service = mock_pagination_service; // Mock extractTopics to prevent real Groq API calls jest.spyOn(tweets_service as any, 'extractTopics').mockResolvedValue({ @@ -464,32 +478,30 @@ describe('TweetsService', () => { it('should delete the tweet successfully when user is authorized', async () => { const mock_tweet_id = 'tweet-123'; const mock_user_id = 'user-1'; - const mock_tweet = { tweet_id: mock_tweet_id, user_id: mock_user_id }; - const mock_delete_result = { affected: 1 }; + const mock_tweet = { + tweet_id: mock_tweet_id, + user_id: mock_user_id, + type: TweetType.TWEET, + }; - const find_one_spy = jest - .spyOn(tweet_repo, 'findOne') - .mockResolvedValue(mock_tweet as any); - const delete_spy = jest - .spyOn(tweet_repo, 'delete') - .mockResolvedValue(mock_delete_result as any); + jest.spyOn(mock_query_runner.manager, 'findOne').mockResolvedValue(mock_tweet as any); + jest.spyOn(mock_query_runner.manager, 'delete').mockResolvedValue({ + affected: 1, + } as any); await expect( tweets_service.deleteTweet(mock_tweet_id, mock_user_id) ).resolves.toBeUndefined(); - expect(find_one_spy).toHaveBeenCalledWith({ - where: { tweet_id: mock_tweet_id }, - select: ['tweet_id', 'user_id', 'type'], - }); - expect(delete_spy).toHaveBeenCalledWith({ tweet_id: mock_tweet_id }); + expect(mock_query_runner.connect).toHaveBeenCalled(); + expect(mock_query_runner.commitTransaction).toHaveBeenCalled(); }); it('should throw NotFoundException if tweet not found', async () => { const mock_tweet_id = 'missing-tweet'; const mock_user_id = 'user-1'; - jest.spyOn(tweet_repo, 'findOne').mockResolvedValue(null); + jest.spyOn(mock_query_runner.manager, 'findOne').mockResolvedValue(null); await expect(tweets_service.deleteTweet(mock_tweet_id, mock_user_id)).rejects.toThrow( 'Tweet not found' @@ -499,9 +511,13 @@ describe('TweetsService', () => { it('should throw BadRequestException if user not authorized', async () => { const mock_tweet_id = 'tweet-123'; const mock_user_id = 'user-1'; - const mock_tweet = { tweet_id: mock_tweet_id, user_id: 'different-user' }; + const mock_tweet = { + tweet_id: mock_tweet_id, + user_id: 'different-user', + type: TweetType.TWEET, + }; - jest.spyOn(tweet_repo, 'findOne').mockResolvedValue(mock_tweet as any); + jest.spyOn(mock_query_runner.manager, 'findOne').mockResolvedValue(mock_tweet as any); await expect(tweets_service.deleteTweet(mock_tweet_id, mock_user_id)).rejects.toThrow( 'User is not allowed to delete this tweet' @@ -513,14 +529,13 @@ describe('TweetsService', () => { const mock_user_id = 'user-1'; const db_error = new Error('Database failure'); - jest.spyOn(tweet_repo, 'findOne').mockRejectedValue(db_error); + jest.spyOn(mock_query_runner.manager, 'findOne').mockRejectedValue(db_error); await expect(tweets_service.deleteTweet(mock_tweet_id, mock_user_id)).rejects.toThrow( 'Database failure' ); }); - // TODO: Fix these tests - they need proper mocking of private method calls it('should delete reply tweet successfully', async () => { const mock_tweet_id = 'reply-tweet-123'; const mock_user_id = 'user-1'; @@ -530,7 +545,7 @@ describe('TweetsService', () => { const mock_reply_tweet = { tweet_id: mock_tweet_id, user_id: mock_user_id, - type: 'REPLY', + type: TweetType.REPLY, content: 'This is a reply', }; @@ -544,20 +559,20 @@ describe('TweetsService', () => { user_id: mock_parent_user_id, }; - tweet_repo.findOne = jest - .fn() + jest.spyOn(mock_query_runner.manager, 'findOne') .mockResolvedValueOnce(mock_reply_tweet as any) - .mockResolvedValueOnce(mock_original_tweet as any) - .mockResolvedValueOnce(mock_reply_tweet as any); - - tweet_reply_repo.findOne = jest.fn().mockResolvedValue(mock_tweet_reply as any); - tweet_repo.delete = jest.fn().mockResolvedValue({ affected: 1 } as any); + .mockResolvedValueOnce(mock_tweet_reply as any) + .mockResolvedValueOnce(mock_original_tweet as any); + jest.spyOn(mock_query_runner.manager, 'decrement').mockResolvedValue({} as any); + jest.spyOn(mock_query_runner.manager, 'delete').mockResolvedValue({ + affected: 1, + } as any); await expect( tweets_service.deleteTweet(mock_tweet_id, mock_user_id) ).resolves.not.toThrow(); - expect(tweet_repo.delete).toHaveBeenCalledWith({ tweet_id: mock_tweet_id }); + expect(mock_query_runner.commitTransaction).toHaveBeenCalled(); }); it('should delete quote tweet successfully', async () => { @@ -569,7 +584,7 @@ describe('TweetsService', () => { const mock_quote_tweet = { tweet_id: mock_tweet_id, user_id: mock_user_id, - type: 'QUOTE', + type: TweetType.QUOTE, content: 'This is a quote', }; @@ -583,20 +598,20 @@ describe('TweetsService', () => { user_id: mock_parent_user_id, }; - tweet_repo.findOne = jest - .fn() + jest.spyOn(mock_query_runner.manager, 'findOne') .mockResolvedValueOnce(mock_quote_tweet as any) - .mockResolvedValueOnce(mock_original_tweet as any) - .mockResolvedValueOnce(mock_quote_tweet as any); - - tweet_quote_repo.findOne = jest.fn().mockResolvedValue(mock_tweet_quote as any); - tweet_repo.delete = jest.fn().mockResolvedValue({ affected: 1 } as any); + .mockResolvedValueOnce(mock_tweet_quote as any) + .mockResolvedValueOnce(mock_original_tweet as any); + jest.spyOn(mock_query_runner.manager, 'decrement').mockResolvedValue({} as any); + jest.spyOn(mock_query_runner.manager, 'delete').mockResolvedValue({ + affected: 1, + } as any); await expect( tweets_service.deleteTweet(mock_tweet_id, mock_user_id) ).resolves.not.toThrow(); - expect(tweet_repo.delete).toHaveBeenCalledWith({ tweet_id: mock_tweet_id }); + expect(mock_query_runner.commitTransaction).toHaveBeenCalled(); }); it('should handle deletion of tweet with mentions', async () => { @@ -606,24 +621,28 @@ describe('TweetsService', () => { const mock_tweet = { tweet_id: mock_tweet_id, user_id: mock_user_id, - type: 'TWEET', + type: TweetType.TWEET, content: 'Hello @john @jane @alice', }; - jest.spyOn(tweet_repo, 'findOne') - .mockResolvedValueOnce(mock_tweet as any) - .mockResolvedValueOnce(mock_tweet as any); - jest.spyOn(tweet_repo, 'delete').mockResolvedValue({ affected: 1 } as any); + jest.spyOn(mock_query_runner.manager, 'findOne').mockResolvedValue(mock_tweet as any); + jest.spyOn(mock_query_runner.manager, 'delete').mockResolvedValue({ + affected: 1, + } as any); + // Mock tweet_repo.findOne for queueMentionDeleteJobs + jest.spyOn(tweet_repo, 'findOne').mockResolvedValue(mock_tweet as any); await tweets_service.deleteTweet(mock_tweet_id, mock_user_id); - expect(mention_job_service.queueMentionNotification).toHaveBeenCalledWith({ - tweet_id: mock_tweet_id, - mentioned_by: mock_user_id, - mentioned_usernames: ['john', 'jane', 'alice'], - tweet_type: 'tweet', - action: 'remove', - }); + expect(mention_job_service.queueMentionNotification).toHaveBeenCalledWith( + expect.objectContaining({ + tweet_id: mock_tweet_id, + mentioned_by: mock_user_id, + mentioned_user_ids: expect.arrayContaining(['@john', '@jane', '@alice']), + tweet_type: 'tweet', + action: 'remove', + }) + ); }); }); @@ -664,10 +683,6 @@ describe('TweetsService', () => { jest.spyOn(tweets_repo, 'getReplyWithParentChain').mockResolvedValue( mock_reply_chain as any ); - jest.spyOn(tweets_repo, 'getReplies').mockResolvedValue({ - tweets: [], - next_cursor: null, - } as any); const result = await tweets_service.getTweetById(mock_tweet_id, mock_user_id); @@ -758,10 +773,6 @@ describe('TweetsService', () => { jest.spyOn(tweets_repo, 'getReplyWithParentChain').mockResolvedValue( mock_reply_chain as any ); - jest.spyOn(tweets_repo, 'getReplies').mockResolvedValue({ - tweets: [], - next_cursor: null, - } as any); const result = await tweets_service.getTweetById(mock_tweet_id, mock_user_id); @@ -1742,14 +1753,22 @@ describe('TweetsService', () => { const mock_user_id = 'user-1'; const mock_tweet = { tweet_id: mock_tweet_id }; const mock_query_dto = { limit: 20, cursor: undefined }; - const mock_result = { - tweets: [], - next_cursor: null, - }; jest.spyOn(tweet_repo, 'findOne').mockResolvedValue(mock_tweet as any); - jest.spyOn(tweets_service['tweets_repository'], 'getReplies').mockResolvedValue( - mock_result as any + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + innerJoin: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([]), + }; + + jest.spyOn(tweet_repo, 'createQueryBuilder').mockReturnValue(mock_query_builder as any); + jest.spyOn(tweets_repo, 'attachUserTweetInteractionFlags').mockReturnValue( + mock_query_builder as any ); const result = await tweets_service.getTweetReplies( @@ -1758,8 +1777,11 @@ describe('TweetsService', () => { mock_query_dto ); + expect(result).toBeDefined(); expect(result.data).toBeDefined(); - expect(result.next_cursor).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + expect(result.count).toBeDefined(); + expect(result.has_more).toBeDefined(); }); it('should throw NotFoundException when tweet does not exist', async () => { @@ -2210,9 +2232,6 @@ describe('TweetsService', () => { }; const mock_user_id = 'user-123'; - const mention_spy = jest - .spyOn(tweets_service as any, 'updateHashtags') - .mockResolvedValue(undefined); const topics_spy = jest .spyOn(tweets_service as any, 'extractTopics') .mockResolvedValue({ @@ -2221,15 +2240,23 @@ describe('TweetsService', () => { News: 100, }); + // Mock the user repository find to return user data + const user_repo = (tweets_service as any).user_repository; + jest.spyOn(user_repo, 'find').mockResolvedValue([ + { username: 'user1', id: 'user-id-1' } as any, + ]); + const result = await (tweets_service as any).extractDataFromTweets( mock_tweet, mock_user_id, mock_query_runner ); - expect(mention_spy).toHaveBeenCalled(); expect(topics_spy).toHaveBeenCalled(); - expect(result).toEqual(['@user1']); + expect(result).toEqual({ + mentioned_user_ids: ['user-id-1'], + mentioned_usernames: ['user1'], + }); }); it('should return early when content is empty', async () => { @@ -2238,15 +2265,16 @@ describe('TweetsService', () => { }; const mock_user_id = 'user-123'; - const spy = jest.spyOn(tweets_service as any, 'mentionNotification'); - - await (tweets_service as any).extractDataFromTweets( + const result = await (tweets_service as any).extractDataFromTweets( mock_tweet, mock_user_id, mock_query_runner ); - expect(spy).not.toHaveBeenCalled(); + expect(result).toEqual({ + mentioned_user_ids: [], + mentioned_usernames: [], + }); }); }); @@ -2777,4 +2805,1511 @@ describe('TweetsService', () => { ); }); }); + + describe('getTweetLikes', () => { + it('should return paginated likes for tweet owner', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'owner-123'; + const cursor = undefined; + const limit = 20; + + const mock_tweet = { + tweet_id, + num_likes: 2, + user_id: current_user_id, + }; + + const mock_likes = [ + { + user: { + id: 'user-1', + username: 'user1', + name: 'User One', + }, + created_at: new Date('2024-01-01'), + following_relation: { follower_id: current_user_id }, + follower_relation: null, + }, + { + user: { + id: 'user-2', + username: 'user2', + name: 'User Two', + }, + created_at: new Date('2024-01-02'), + following_relation: null, + follower_relation: { follower_id: 'user-2' }, + }, + ]; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(mock_tweet); + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_likes), + }; + + jest.spyOn(tweet_like_repo, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'applyCursorPagination').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'generateNextCursor').mockReturnValue('next-cursor'); + + const result = await tweets_service.getTweetLikes( + tweet_id, + current_user_id, + cursor, + limit + ); + + expect(tweet_repo.findOne).toHaveBeenCalledWith({ + where: { tweet_id }, + select: ['tweet_id', 'num_likes', 'user_id'], + }); + expect(result.data).toBeDefined(); + expect(result.next_cursor).toBe('next-cursor'); + expect(result.has_more).toBe(false); + }); + + it('should throw NotFoundException when tweet does not exist', async () => { + const tweet_id = 'nonexistent-tweet'; + const current_user_id = 'user-123'; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(null); + + await expect(tweets_service.getTweetLikes(tweet_id, current_user_id)).rejects.toThrow( + 'Tweet not found' + ); + }); + + it('should throw BadRequestException when non-owner tries to view likes', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'non-owner-123'; + + const mock_tweet = { + tweet_id, + num_likes: 5, + user_id: 'owner-456', + }; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(mock_tweet); + + await expect(tweets_service.getTweetLikes(tweet_id, current_user_id)).rejects.toThrow( + 'Only the tweet owner can see who liked their tweet' + ); + }); + + it('should apply cursor pagination correctly', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'owner-123'; + const cursor = '2024-01-01T00:00:00.000Z_user-1'; + const limit = 10; + + const mock_tweet = { + tweet_id, + num_likes: 1, + user_id: current_user_id, + }; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(mock_tweet); + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([]), + }; + + jest.spyOn(tweet_like_repo, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'applyCursorPagination').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'generateNextCursor').mockReturnValue(null); + + await tweets_service.getTweetLikes(tweet_id, current_user_id, cursor, limit); + + expect(pagination_service.applyCursorPagination).toHaveBeenCalledWith( + mock_query_builder, + cursor, + 'like', + 'created_at', + 'user_id' + ); + }); + }); + + describe('getTweetReposts', () => { + it('should return paginated reposts', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'user-123'; + const cursor = undefined; + const limit = 20; + + const mock_tweet = { + tweet_id, + num_reposts: 2, + user_id: 'owner-123', + }; + + const mock_reposts = [ + { + user: { + id: 'user-1', + username: 'user1', + name: 'User One', + }, + created_at: new Date('2024-01-01'), + following_relation: { follower_id: current_user_id }, + follower_relation: null, + }, + ]; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(mock_tweet); + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_reposts), + }; + + jest.spyOn(tweet_repost_repo, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'applyCursorPagination').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'generateNextCursor').mockReturnValue('next-cursor'); + + const result = await tweets_service.getTweetReposts( + tweet_id, + current_user_id, + cursor, + limit + ); + + expect(tweet_repo.findOne).toHaveBeenCalledWith({ + where: { tweet_id }, + select: ['tweet_id', 'num_reposts', 'user_id'], + }); + expect(result.data).toBeDefined(); + expect(result.next_cursor).toBe('next-cursor'); + expect(result.has_more).toBe(false); + }); + + it('should throw NotFoundException when tweet does not exist', async () => { + const tweet_id = 'nonexistent-tweet'; + const current_user_id = 'user-123'; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(null); + + await expect(tweets_service.getTweetReposts(tweet_id, current_user_id)).rejects.toThrow( + 'Tweet not found' + ); + }); + + it('should handle has_more flag when limit is reached', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'user-123'; + const limit = 2; + + const mock_tweet = { + tweet_id, + num_reposts: 2, + user_id: 'owner-123', + }; + + const mock_reposts = [ + { + user: { id: 'user-1', username: 'user1' }, + created_at: new Date('2024-01-01'), + following_relation: null, + follower_relation: null, + }, + { + user: { id: 'user-2', username: 'user2' }, + created_at: new Date('2024-01-02'), + following_relation: null, + follower_relation: null, + }, + ]; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(mock_tweet); + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + leftJoinAndMapOne: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_reposts), + }; + + jest.spyOn(tweet_repost_repo, 'createQueryBuilder').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'applyCursorPagination').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'generateNextCursor').mockReturnValue('next-cursor'); + + const result = await tweets_service.getTweetReposts( + tweet_id, + current_user_id, + undefined, + limit + ); + + expect(result.has_more).toBe(true); + }); + }); + + describe('getTweetQuotes', () => { + it('should return paginated quote tweets', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'user-123'; + const cursor = undefined; + const limit = 20; + + const mock_tweet = { + tweet_id, + num_quotes: 1, + user: { id: 'owner-123', username: 'owner' }, + }; + + const mock_quotes = [ + { + quote_tweet: { + tweet_id: 'quote-1', + content: 'Quote content', + user: { id: 'user-1', username: 'user1' }, + created_at: new Date('2024-01-01'), + }, + }, + ]; + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + getOne: jest.fn().mockResolvedValue(mock_tweet), + }; + + const mock_quote_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_quotes), + }; + + jest.spyOn(tweet_repo, 'createQueryBuilder').mockReturnValue(mock_query_builder as any); + jest.spyOn(tweet_quote_repo, 'createQueryBuilder').mockReturnValue( + mock_quote_query_builder as any + ); + jest.spyOn(tweets_repo, 'attachUserTweetInteractionFlags').mockReturnValue( + mock_quote_query_builder as any + ); + jest.spyOn(pagination_service, 'applyCursorPagination').mockReturnValue( + mock_quote_query_builder as any + ); + jest.spyOn(pagination_service, 'generateNextCursor').mockReturnValue('next-cursor'); + jest.spyOn(tweets_service as any, 'incrementTweetViewsAsync').mockResolvedValue( + undefined + ); + + const result = await tweets_service.getTweetQuotes( + tweet_id, + current_user_id, + cursor, + limit + ); + + expect(result.data).toBeDefined(); + expect(result.count).toBe(1); + expect(result.parent).toEqual(mock_tweet); + expect(result.next_cursor).toBe('next-cursor'); + expect(result.has_more).toBe(false); + }); + + it('should throw NotFoundException when tweet does not exist', async () => { + const tweet_id = 'nonexistent-tweet'; + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + getOne: jest.fn().mockResolvedValue(null), + }; + + jest.spyOn(tweet_repo, 'createQueryBuilder').mockReturnValue(mock_query_builder as any); + + await expect(tweets_service.getTweetQuotes(tweet_id)).rejects.toThrow( + 'Tweet not found' + ); + }); + + it('should work without current_user_id', async () => { + const tweet_id = 'tweet-123'; + + const mock_tweet = { + tweet_id, + num_quotes: 0, + user: { id: 'owner-123', username: 'owner' }, + }; + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + getOne: jest.fn().mockResolvedValue(mock_tweet), + }; + + const mock_quote_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue([]), + }; + + jest.spyOn(tweet_repo, 'createQueryBuilder').mockReturnValue(mock_query_builder as any); + jest.spyOn(tweet_quote_repo, 'createQueryBuilder').mockReturnValue( + mock_quote_query_builder as any + ); + jest.spyOn(tweets_repo, 'attachUserTweetInteractionFlags').mockReturnValue( + mock_quote_query_builder as any + ); + jest.spyOn(pagination_service, 'applyCursorPagination').mockReturnValue( + mock_quote_query_builder as any + ); + jest.spyOn(pagination_service, 'generateNextCursor').mockReturnValue(null); + jest.spyOn(tweets_service as any, 'incrementTweetViewsAsync').mockResolvedValue( + undefined + ); + + const result = await tweets_service.getTweetQuotes(tweet_id); + + expect(result.data).toHaveLength(0); + expect(result.count).toBe(0); + }); + + it('should increment views for quote tweets', async () => { + const tweet_id = 'tweet-123'; + + const mock_tweet = { + tweet_id, + num_quotes: 1, + user: { id: 'owner-123', username: 'owner' }, + }; + + const mock_quotes = [ + { + quote_tweet: { + tweet_id: 'quote-1', + content: 'Quote', + created_at: new Date(), + }, + }, + ]; + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + getOne: jest.fn().mockResolvedValue(mock_tweet), + }; + + const mock_quote_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + addOrderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_quotes), + }; + + jest.spyOn(tweet_repo, 'createQueryBuilder').mockReturnValue(mock_query_builder as any); + jest.spyOn(tweet_quote_repo, 'createQueryBuilder').mockReturnValue( + mock_quote_query_builder as any + ); + jest.spyOn(tweets_repo, 'attachUserTweetInteractionFlags').mockReturnValue( + mock_quote_query_builder as any + ); + jest.spyOn(pagination_service, 'applyCursorPagination').mockReturnValue( + mock_quote_query_builder as any + ); + jest.spyOn(pagination_service, 'generateNextCursor').mockReturnValue(null); + + const increment_spy = jest + .spyOn(tweets_service as any, 'incrementTweetViewsAsync') + .mockResolvedValue(undefined); + + await tweets_service.getTweetQuotes(tweet_id); + + expect(increment_spy).toHaveBeenCalledWith(['quote-1']); + }); + }); + + describe('getTweetReplies', () => { + it('should return paginated replies', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'user-123'; + const query_dto = { cursor: undefined, limit: 20 }; + + const mock_tweet = { + tweet_id, + }; + + const mock_replies = [ + { + tweet_id: 'reply-1', + content: 'Reply content', + user: { id: 'user-1', username: 'user1' }, + created_at: new Date('2024-01-01'), + }, + ]; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(mock_tweet); + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + innerJoin: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_replies), + }; + + jest.spyOn(tweet_repo, 'createQueryBuilder').mockReturnValue(mock_query_builder as any); + jest.spyOn(tweets_repo, 'attachUserTweetInteractionFlags').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'applyCursorPagination').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'generateNextCursor').mockReturnValue('next-cursor'); + jest.spyOn(tweets_service as any, 'incrementTweetViewsAsync').mockResolvedValue( + undefined + ); + + const result = await tweets_service.getTweetReplies( + tweet_id, + current_user_id, + query_dto + ); + + expect(tweet_repo.findOne).toHaveBeenCalledWith({ + where: { tweet_id }, + select: ['tweet_id'], + }); + expect(result.data).toBeDefined(); + expect(result.count).toBe(1); + expect(result.has_more).toBe(false); + }); + + it('should throw NotFoundException when tweet does not exist', async () => { + const tweet_id = 'nonexistent-tweet'; + const current_user_id = 'user-123'; + const query_dto = { cursor: undefined, limit: 20 }; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(null); + + await expect( + tweets_service.getTweetReplies(tweet_id, current_user_id, query_dto) + ).rejects.toThrow('Tweet not found'); + }); + + it('should handle has_more flag correctly when more replies exist', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'user-123'; + const query_dto = { cursor: undefined, limit: 2 }; + + const mock_tweet = { tweet_id }; + + const mock_replies = [ + { + tweet_id: 'reply-1', + content: 'Reply 1', + created_at: new Date('2024-01-01'), + }, + { + tweet_id: 'reply-2', + content: 'Reply 2', + created_at: new Date('2024-01-02'), + }, + { + tweet_id: 'reply-3', + content: 'Reply 3', + created_at: new Date('2024-01-03'), + }, + ]; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(mock_tweet); + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + innerJoin: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_replies), + }; + + jest.spyOn(tweet_repo, 'createQueryBuilder').mockReturnValue(mock_query_builder as any); + jest.spyOn(tweets_repo, 'attachUserTweetInteractionFlags').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'applyCursorPagination').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'generateNextCursor').mockReturnValue(null); + jest.spyOn(tweets_service as any, 'incrementTweetViewsAsync').mockResolvedValue( + undefined + ); + + const result = await tweets_service.getTweetReplies( + tweet_id, + current_user_id, + query_dto + ); + + expect(result.data).toHaveLength(2); + expect(result.has_more).toBe(true); + }); + + it('should increment views for reply tweets', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'user-123'; + const query_dto = { cursor: undefined, limit: 20 }; + + const mock_tweet = { tweet_id }; + const mock_replies = [ + { tweet_id: 'reply-1', created_at: new Date() }, + { tweet_id: 'reply-2', created_at: new Date() }, + ]; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(mock_tweet); + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + innerJoin: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + orderBy: jest.fn().mockReturnThis(), + take: jest.fn().mockReturnThis(), + getMany: jest.fn().mockResolvedValue(mock_replies), + }; + + jest.spyOn(tweet_repo, 'createQueryBuilder').mockReturnValue(mock_query_builder as any); + jest.spyOn(tweets_repo, 'attachUserTweetInteractionFlags').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'applyCursorPagination').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(pagination_service, 'generateNextCursor').mockReturnValue(null); + + const increment_spy = jest + .spyOn(tweets_service as any, 'incrementTweetViewsAsync') + .mockResolvedValue(undefined); + + await tweets_service.getTweetReplies(tweet_id, current_user_id, query_dto); + + expect(increment_spy).toHaveBeenCalledWith(['reply-1', 'reply-2']); + }); + }); + + describe('incrementTweetViews', () => { + it('should increment tweet views successfully', async () => { + const tweet_id = 'tweet-123'; + const mock_tweet = { tweet_id, num_views: 10 }; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(mock_tweet); + (tweet_repo.increment as jest.Mock) = jest.fn().mockResolvedValue(undefined); + + const result = await tweets_service.incrementTweetViews(tweet_id); + + expect(tweet_repo.findOne).toHaveBeenCalledWith({ where: { tweet_id } }); + expect(tweet_repo.increment).toHaveBeenCalledWith({ tweet_id }, 'num_views', 1); + expect(result).toEqual({ success: true }); + }); + + it('should throw NotFoundException when tweet does not exist', async () => { + const tweet_id = 'nonexistent-tweet'; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(null); + + await expect(tweets_service.incrementTweetViews(tweet_id)).rejects.toThrow( + 'Tweet not found' + ); + }); + + it('should handle and rethrow database errors', async () => { + const tweet_id = 'tweet-123'; + const mock_tweet = { tweet_id }; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(mock_tweet); + (tweet_repo.increment as jest.Mock) = jest + .fn() + .mockRejectedValue(new Error('Database error')); + + await expect(tweets_service.incrementTweetViews(tweet_id)).rejects.toThrow( + 'Database error' + ); + }); + }); + + describe('getTweetsByIds', () => { + it('should return tweets by IDs', async () => { + const tweet_ids = ['tweet-1', 'tweet-2']; + const current_user_id = 'user-123'; + const mock_tweets = [ + { tweet_id: 'tweet-1', content: 'Tweet 1' }, + { tweet_id: 'tweet-2', content: 'Tweet 2' }, + ]; + + jest.spyOn(tweets_repo, 'getTweetsByIds').mockResolvedValue(mock_tweets as any); + + const result = await tweets_service.getTweetsByIds(tweet_ids, current_user_id); + + expect(result).toEqual(mock_tweets); + }); + + it('should handle errors when fetching tweets by IDs', async () => { + const tweet_ids = ['tweet-1']; + const error = new Error('Database error'); + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + + jest.spyOn(tweets_repo, 'getTweetsByIds').mockRejectedValue(error); + + await expect(tweets_service.getTweetsByIds(tweet_ids)).rejects.toThrow( + 'Database error' + ); + + console_error_spy.mockRestore(); + }); + }); + + describe('incrementTweetViewsAsync', () => { + it('should increment views for multiple tweets', async () => { + const tweet_ids = ['tweet-1', 'tweet-2', 'tweet-3']; + const query_spy = jest.spyOn(data_source, 'query').mockResolvedValue(undefined as any); + + await (tweets_service as any).incrementTweetViewsAsync(tweet_ids); + + expect(query_spy).toHaveBeenCalledWith( + 'SELECT increment_tweet_views_batch($1::uuid[])', + [tweet_ids] + ); + }); + + it('should return early if tweet_ids array is empty', async () => { + const query_spy = jest.spyOn(data_source, 'query').mockResolvedValue(undefined as any); + + await (tweets_service as any).incrementTweetViewsAsync([]); + + expect(query_spy).not.toHaveBeenCalled(); + }); + + it('should handle database errors silently', async () => { + const tweet_ids = ['tweet-1']; + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + jest.spyOn(data_source, 'query').mockRejectedValue(new Error('DB Error')); + + // Should not throw + await expect( + (tweets_service as any).incrementTweetViewsAsync(tweet_ids) + ).resolves.toBeUndefined(); + + console_error_spy.mockRestore(); + }); + }); + + describe('getTweetById', () => { + it('should return tweet by ID with user', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'user-123'; + const mock_tweet = { + tweet_id, + content: 'Test tweet', + user: { id: 'user-1', username: 'user1' }, + }; + + jest.spyOn(tweets_service as any, 'getTweetWithUserById').mockResolvedValue(mock_tweet); + + const result = await tweets_service.getTweetById(tweet_id, current_user_id); + + expect(result).toEqual(mock_tweet); + }); + + it('should handle errors and rethrow', async () => { + const tweet_id = 'tweet-123'; + const error = new Error('Tweet not found'); + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + + jest.spyOn(tweets_service as any, 'getTweetWithUserById').mockRejectedValue(error); + + await expect(tweets_service.getTweetById(tweet_id)).rejects.toThrow('Tweet not found'); + expect(console_error_spy).toHaveBeenCalled(); + + console_error_spy.mockRestore(); + }); + }); + + describe('uploadImageToAzure', () => { + it('should upload image to Azure successfully', async () => { + const image_buffer = Buffer.from('test'); + const image_name = 'test-image.jpg'; + const container_name = 'post-images'; + + process.env.AZURE_STORAGE_CONNECTION_STRING = + 'DefaultEndpointsProtocol=https;AccountName=test;AccountKey=testkey;EndpointSuffix=core.windows.net'; + + const mock_blob_url = 'https://test.blob.core.windows.net/post-images/test-image.jpg'; + const mock_upload = jest.fn().mockResolvedValue({}); + const mock_block_blob_client = { + upload: mock_upload, + url: mock_blob_url, + }; + const mock_container_client = { + createIfNotExists: jest.fn().mockResolvedValue({}), + getBlockBlobClient: jest.fn().mockReturnValue(mock_block_blob_client), + }; + const mock_blob_service_client = { + getContainerClient: jest.fn().mockReturnValue(mock_container_client), + }; + + (BlobServiceClient.fromConnectionString as jest.Mock).mockReturnValue( + mock_blob_service_client + ); + + const result = await (tweets_service as any).uploadImageToAzure( + image_buffer, + image_name, + container_name + ); + + expect(result).toBe(mock_blob_url); + expect(mock_upload).toHaveBeenCalled(); + }); + + it('should throw error when connection string is missing', async () => { + delete process.env.AZURE_STORAGE_CONNECTION_STRING; + + const image_buffer = Buffer.from('image data'); + const image_name = 'test.jpg'; + const container_name = 'images'; + + await expect( + (tweets_service as any).uploadImageToAzure(image_buffer, image_name, container_name) + ).rejects.toThrow('AZURE_STORAGE_CONNECTION_STRING is not defined'); + }); + }); + + describe('queueRepostAndQuoteDeleteJobs', () => { + it('should handle reply deletion and decrement reply count', async () => { + const tweet = { tweet_id: 'reply-123', user_id: 'user-123', type: TweetType.REPLY }; + const user_id = 'user-123'; + + const mock_reply_info = { original_tweet_id: 'parent-123' }; + const mock_original_tweet = { user_id: 'parent-owner' }; + + jest.spyOn(mock_query_runner.manager, 'findOne') + .mockResolvedValueOnce(mock_reply_info) + .mockResolvedValueOnce(mock_original_tweet); + jest.spyOn(mock_query_runner.manager, 'decrement').mockResolvedValue(undefined as any); + + await (tweets_service as any).queueRepostAndQuoteDeleteJobs( + tweet, + TweetType.REPLY, + user_id, + mock_query_runner + ); + + expect(mock_query_runner.manager.decrement).toHaveBeenCalledWith( + expect.any(Function), + { tweet_id: 'parent-123' }, + 'num_replies', + 1 + ); + expect(reply_job_service.queueReplyNotification).toHaveBeenCalled(); + }); + + it('should handle quote deletion and decrement counts', async () => { + const tweet = { tweet_id: 'quote-123', user_id: 'user-123', type: TweetType.QUOTE }; + const user_id = 'user-123'; + + const mock_quote_info = { original_tweet_id: 'parent-123' }; + const mock_original_tweet = { user_id: 'parent-owner' }; + + jest.spyOn(mock_query_runner.manager, 'findOne') + .mockResolvedValueOnce(mock_quote_info) + .mockResolvedValueOnce(mock_original_tweet); + jest.spyOn(mock_query_runner.manager, 'decrement').mockResolvedValue(undefined as any); + + await (tweets_service as any).queueRepostAndQuoteDeleteJobs( + tweet, + TweetType.QUOTE, + user_id, + mock_query_runner + ); + + expect(mock_query_runner.manager.decrement).toHaveBeenCalledTimes(2); + expect(quote_job_service.queueQuoteNotification).toHaveBeenCalled(); + }); + + it('should handle errors gracefully', async () => { + const tweet = { tweet_id: 'tweet-123', user_id: 'user-123', type: TweetType.REPLY }; + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + + jest.spyOn(mock_query_runner.manager, 'findOne').mockRejectedValue( + new Error('DB Error') + ); + + await (tweets_service as any).queueRepostAndQuoteDeleteJobs( + tweet, + TweetType.REPLY, + 'user-123', + mock_query_runner + ); + + expect(console_error_spy).toHaveBeenCalled(); + console_error_spy.mockRestore(); + }); + + it('should not queue notification if no parent owner found', async () => { + const tweet = { tweet_id: 'reply-123', user_id: 'user-123', type: TweetType.REPLY }; + + const mock_reply_info = { original_tweet_id: 'parent-123' }; + + jest.spyOn(mock_query_runner.manager, 'findOne') + .mockResolvedValueOnce(mock_reply_info) + .mockResolvedValueOnce(null); + jest.spyOn(mock_query_runner.manager, 'decrement').mockResolvedValue(undefined as any); + + await (tweets_service as any).queueRepostAndQuoteDeleteJobs( + tweet, + TweetType.REPLY, + 'user-123', + mock_query_runner + ); + + expect(reply_job_service.queueReplyNotification).not.toHaveBeenCalled(); + }); + }); + + describe('queueMentionDeleteJobs', () => { + it('should extract mentions and queue deletion notifications', async () => { + const tweet = { tweet_id: 'tweet-123', content: 'Hello @user1 and @user2' }; + const user_id = 'user-123'; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(tweet); + jest.spyOn(tweets_service as any, 'mentionNotification').mockResolvedValue(undefined); + + await (tweets_service as any).queueMentionDeleteJobs(tweet, user_id); + + expect(tweets_service['mentionNotification']).toHaveBeenCalledWith( + ['@user1', '@user2'], + user_id, + tweet, + 'remove' + ); + }); + + it('should return early if content is empty', async () => { + const tweet = { tweet_id: 'tweet-123' }; + const user_id = 'user-123'; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(tweet); + + await (tweets_service as any).queueMentionDeleteJobs(tweet, user_id); + + // Should not throw and should return early + expect(tweet_repo.findOne).toHaveBeenCalled(); + }); + + it('should return early if no mentions found', async () => { + const tweet = { tweet_id: 'tweet-123', content: 'No mentions here' }; + const user_id = 'user-123'; + + (tweet_repo.findOne as jest.Mock).mockResolvedValue(tweet); + + await (tweets_service as any).queueMentionDeleteJobs(tweet, user_id); + + // Should not throw + expect(tweet_repo.findOne).toHaveBeenCalled(); + }); + + it('should handle errors gracefully', async () => { + const tweet = { tweet_id: 'tweet-123' }; + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + + (tweet_repo.findOne as jest.Mock).mockRejectedValue(new Error('DB Error')); + + await (tweets_service as any).queueMentionDeleteJobs(tweet, 'user-123'); + + expect(console_error_spy).toHaveBeenCalled(); + console_error_spy.mockRestore(); + }); + }); + + describe('getTweetWithUserById', () => { + it('should return tweet with user data for non-reply tweet', async () => { + const tweet_id = 'tweet-123'; + const current_user_id = 'user-123'; + + const mock_tweet = { + tweet_id, + content: 'Test', + type: TweetType.TWEET, + user: { id: 'user-1', username: 'user1' }, + }; + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + getOne: jest.fn().mockResolvedValue(mock_tweet), + }; + + jest.spyOn(tweet_repo, 'createQueryBuilder').mockReturnValue(mock_query_builder as any); + jest.spyOn(tweets_repo, 'attachUserTweetInteractionFlags').mockReturnValue( + mock_query_builder as any + ); + jest.spyOn(tweets_service as any, 'incrementTweetViewsAsync').mockResolvedValue( + undefined + ); + jest.spyOn(tweets_service as any, 'getReplyWithUserById').mockResolvedValue(null); + + const result = await (tweets_service as any).getTweetWithUserById( + tweet_id, + current_user_id + ); + + expect(result).toBeDefined(); + expect(tweets_service['incrementTweetViewsAsync']).toHaveBeenCalledWith([tweet_id]); + }); + + it('should throw NotFoundException if tweet not found', async () => { + const tweet_id = 'nonexistent'; + + const mock_query_builder = { + leftJoinAndSelect: jest.fn().mockReturnThis(), + where: jest.fn().mockReturnThis(), + select: jest.fn().mockReturnThis(), + getOne: jest.fn().mockResolvedValue(null), + }; + + jest.spyOn(tweet_repo, 'createQueryBuilder').mockReturnValue(mock_query_builder as any); + jest.spyOn(tweets_repo, 'attachUserTweetInteractionFlags').mockReturnValue( + mock_query_builder as any + ); + + await expect((tweets_service as any).getTweetWithUserById(tweet_id)).rejects.toThrow( + 'Tweet not found' + ); + }); + }); + + describe('insertTweetHashtags', () => { + it('should extract and insert hashtags', async () => { + const content = 'Test #hashtag1 #hashtag2 #Hashtag1'; + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + + const upsert_spy = jest + .spyOn(mock_query_runner.manager, 'upsert') + .mockResolvedValue(undefined as any); + + await (tweets_service as any).insertTweetHashtags( + content, + tweet_id, + user_id, + mock_query_runner + ); + + expect(upsert_spy).toHaveBeenCalled(); + }); + + it('should return early if no hashtags found', async () => { + const content = 'No hashtags here'; + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + + const upsert_spy = jest.spyOn(mock_query_runner.manager, 'upsert'); + + await (tweets_service as any).insertTweetHashtags( + content, + tweet_id, + user_id, + mock_query_runner + ); + + expect(upsert_spy).not.toHaveBeenCalled(); + }); + }); + + describe('mentionNotification', () => { + it('should queue mention notifications for add action', async () => { + const mentioned_user_ids = ['user-1', 'user-2']; + const user_id = 'author-123'; + const tweet = { tweet_id: 'tweet-123', content: 'Test', type: TweetType.TWEET }; + + await (tweets_service as any).mentionNotification( + mentioned_user_ids, + user_id, + tweet, + 'add' + ); + + expect(mention_job_service.queueMentionNotification).toHaveBeenCalledTimes(1); + expect(mention_job_service.queueMentionNotification).toHaveBeenCalledWith({ + tweet, + tweet_id: tweet.tweet_id, + parent_tweet: undefined, + mentioned_by: user_id, + mentioned_user_ids, + tweet_type: tweet.type, + action: 'add', + }); + }); + + it('should queue mention notifications for remove action', async () => { + const mentions = ['user-1', 'user-2']; + const user_id = 'author-123'; + const tweet = { tweet_id: 'tweet-123', type: TweetType.TWEET }; + + await (tweets_service as any).mentionNotification(mentions, user_id, tweet, 'remove'); + + expect(mention_job_service.queueMentionNotification).toHaveBeenCalledTimes(1); + }); + + it('should handle empty mentioned users array', async () => { + await (tweets_service as any).mentionNotification([], 'user-123', {}, 'add'); + + expect(mention_job_service.queueMentionNotification).not.toHaveBeenCalled(); + }); + + it('should handle errors gracefully', async () => { + const mentioned_user_ids = ['user-1']; + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + + jest.spyOn(mention_job_service, 'queueMentionNotification').mockRejectedValue( + new Error('Queue error') + ); + + await (tweets_service as any).mentionNotification( + mentioned_user_ids, + 'user-123', + { tweet_id: 'tweet-1', type: TweetType.TWEET }, + 'add' + ); + + expect(console_error_spy).toHaveBeenCalled(); + console_error_spy.mockRestore(); + }); + + it('should deduplicate mentioned user IDs', async () => { + const mentioned_user_ids = ['user-1', 'user-2', 'user-1', 'user-2']; + const user_id = 'author-123'; + const tweet = { tweet_id: 'tweet-123', type: TweetType.TWEET }; + + await (tweets_service as any).mentionNotification( + mentioned_user_ids, + user_id, + tweet, + 'add' + ); + + expect(mention_job_service.queueMentionNotification).toHaveBeenCalledWith( + expect.objectContaining({ + mentioned_user_ids: ['user-1', 'user-2'], + }) + ); + }); + }); + + describe('Edge cases and error handling', () => { + it('should handle transaction rollback when isTransactionActive is false', async () => { + const mock_tweet_dto: CreateTweetDTO = { content: 'Test' } as CreateTweetDTO; + const mock_user_id = 'user-123'; + + mock_query_runner.isTransactionActive = false; + jest.spyOn(mock_query_runner.manager, 'create').mockImplementation(() => { + throw new Error('Create failed'); + }); + + await expect(tweets_service.createTweet(mock_tweet_dto, mock_user_id)).rejects.toThrow( + 'Create failed' + ); + + expect(mock_query_runner.rollbackTransaction).not.toHaveBeenCalled(); + mock_query_runner.isTransactionActive = true; + }); + + it('should handle updateTweet when user is not the owner', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const update_dto = { content: 'Updated' }; + const existing_tweet = { tweet_id, user_id: 'different-user' }; + + jest.spyOn(mock_query_runner.manager, 'findOne').mockResolvedValue(existing_tweet); + + await expect( + tweets_service.updateTweet(update_dto as any, tweet_id, user_id) + ).rejects.toThrow('User is not allowed to update this tweet'); + + expect(mock_query_runner.rollbackTransaction).toHaveBeenCalled(); + }); + + it('should handle deleteTweet when user is not the owner', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const existing_tweet = { tweet_id, user_id: 'different-user', type: TweetType.TWEET }; + + jest.spyOn(mock_query_runner.manager, 'findOne').mockResolvedValue(existing_tweet); + + await expect(tweets_service.deleteTweet(tweet_id, user_id)).rejects.toThrow( + 'User is not allowed to delete this tweet' + ); + + expect(mock_query_runner.rollbackTransaction).toHaveBeenCalled(); + }); + + it('should handle deleteTweet error and log it', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + + jest.spyOn(mock_query_runner.manager, 'findOne').mockRejectedValue( + new Error('DB Error') + ); + + await expect(tweets_service.deleteTweet(tweet_id, user_id)).rejects.toThrow('DB Error'); + expect(console_error_spy).toHaveBeenCalled(); + + console_error_spy.mockRestore(); + }); + + it('should handle unlikeTweet error and log it', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + + jest.spyOn(mock_query_runner.manager, 'findOne').mockRejectedValue( + new Error('DB Error') + ); + + await expect(tweets_service.unlikeTweet(tweet_id, user_id)).rejects.toThrow('DB Error'); + expect(console_error_spy).toHaveBeenCalled(); + + console_error_spy.mockRestore(); + }); + + it('should handle unbookmarkTweet error and log it', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + + jest.spyOn(mock_query_runner.manager, 'exists').mockRejectedValue( + new Error('DB Error') + ); + + await expect(tweets_service.unbookmarkTweet(tweet_id, user_id)).rejects.toThrow( + 'DB Error' + ); + expect(console_error_spy).toHaveBeenCalled(); + + console_error_spy.mockRestore(); + }); + + it('should handle deleteRepost error and log it', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + + jest.spyOn(mock_query_runner.manager, 'findOne').mockRejectedValue( + new Error('DB Error') + ); + + await expect(tweets_service.deleteRepost(tweet_id, user_id)).rejects.toThrow( + 'DB Error' + ); + expect(console_error_spy).toHaveBeenCalled(); + + console_error_spy.mockRestore(); + }); + + it('should handle repostTweetWithQuote when user is same as parent owner', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const quote_dto = { content: 'Quote' } as CreateTweetDTO; + const parent_tweet = { + tweet_id, + user: { id: user_id }, + }; + + jest.spyOn(tweets_service as any, 'getTweetWithUserById').mockResolvedValue( + parent_tweet + ); + jest.spyOn(tweets_service as any, 'extractDataFromTweets').mockResolvedValue({ + mentioned_user_ids: [], + mentioned_usernames: [], + }); + jest.spyOn(mock_query_runner.manager, 'create').mockReturnValue({} as any); + jest.spyOn(mock_query_runner.manager, 'save').mockResolvedValue({ + tweet_id: 'quote-1', + }); + jest.spyOn(mock_query_runner.manager, 'increment').mockResolvedValue(undefined as any); + jest.spyOn(tweets_service as any, 'insertTweetHashtags').mockResolvedValue(undefined); + + await tweets_service.repostTweetWithQuote(tweet_id, user_id, quote_dto); + + expect(quote_job_service.queueQuoteNotification).not.toHaveBeenCalled(); + }); + + it('should handle replyToTweet when user is same as original tweet owner', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const reply_dto = { content: 'Reply' } as CreateTweetDTO; + const original_tweet = { tweet_id, user_id }; + + jest.spyOn(mock_query_runner.manager, 'findOne') + .mockResolvedValueOnce(original_tweet) + .mockResolvedValueOnce(null); + jest.spyOn(tweets_service as any, 'extractDataFromTweets').mockResolvedValue({ + mentioned_user_ids: [], + mentioned_usernames: [], + }); + jest.spyOn(mock_query_runner.manager, 'create').mockReturnValue({} as any); + jest.spyOn(mock_query_runner.manager, 'save').mockResolvedValue({ + tweet_id: 'reply-1', + }); + jest.spyOn(mock_query_runner.manager, 'increment').mockResolvedValue(undefined as any); + jest.spyOn(tweets_service as any, 'insertTweetHashtags').mockResolvedValue(undefined); + + await tweets_service.replyToTweet(tweet_id, user_id, reply_dto); + + expect(reply_job_service.queueReplyNotification).not.toHaveBeenCalled(); + }); + + it('should handle likeTweet when user is the tweet owner', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const tweet = { tweet_id, user_id }; + + jest.spyOn(mock_query_runner.manager, 'findOne').mockResolvedValue(tweet); + jest.spyOn(mock_query_runner.manager, 'create').mockReturnValue({} as any); + jest.spyOn(mock_query_runner.manager, 'insert').mockResolvedValue(undefined as any); + jest.spyOn(mock_query_runner.manager, 'increment').mockResolvedValue(undefined as any); + + await tweets_service.likeTweet(tweet_id, user_id); + + expect(mock_query_runner.commitTransaction).toHaveBeenCalled(); + // Should not queue notification when liking own tweet + }); + + it('should handle unlikeTweet when user is the tweet owner', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const tweet = { tweet_id, user_id }; + + jest.spyOn(mock_query_runner.manager, 'findOne').mockResolvedValue(tweet); + jest.spyOn(mock_query_runner.manager, 'delete').mockResolvedValue({ + affected: 1, + } as any); + jest.spyOn(mock_query_runner.manager, 'decrement').mockResolvedValue(undefined as any); + + await tweets_service.unlikeTweet(tweet_id, user_id); + + expect(mock_query_runner.commitTransaction).toHaveBeenCalled(); + }); + + it('should handle repostTweet when user is the tweet owner', async () => { + const tweet_id = 'tweet-123'; + const user_id = 'user-123'; + const tweet = { tweet_id, user_id }; + + jest.spyOn(mock_query_runner.manager, 'findOne').mockResolvedValue(tweet); + jest.spyOn(mock_query_runner.manager, 'create').mockReturnValue({} as any); + jest.spyOn(mock_query_runner.manager, 'insert').mockResolvedValue(undefined as any); + jest.spyOn(mock_query_runner.manager, 'increment').mockResolvedValue(undefined as any); + + await tweets_service.repostTweet(tweet_id, user_id); + + expect(mock_query_runner.commitTransaction).toHaveBeenCalled(); + }); + }); + + describe('updateHashtags', () => { + it('should update hashtags', async () => { + const names = ['hashtag1', 'hashtag2']; + const user_id = 'user-123'; + const tweet_id = 'tweet-123'; + + jest.spyOn(mock_query_runner.manager, 'upsert').mockResolvedValue(undefined as any); + + await (tweets_service as any).updateHashtags( + names, + user_id, + mock_query_runner, + tweet_id + ); + + expect(mock_query_runner.manager.upsert).toHaveBeenCalled(); + }); + }); + + describe('buildDefaultHashtagTopics', () => { + it('should build default hashtag topics', () => { + const hashtags = ['sports', 'football']; + const topic = 'Sports'; + + const result = (tweets_service as any).buildDefaultHashtagTopics(hashtags, topic); + + expect(result).toHaveProperty('sports'); + expect(result.sports.Sports).toBe(100); + }); + }); + + describe('createFakeTrendTweet', () => { + it('should create fake trend tweet', async () => { + const content = 'Fake trend #sports'; + const user_id = 'user-123'; + const hashtag_topics = { sports: { Sports: 100 } }; + + jest.spyOn(tweets_service as any, 'extractDataFromTweets').mockResolvedValue({ + mentioned_user_ids: [], + mentioned_usernames: [], + }); + jest.spyOn(mock_query_runner.manager, 'create').mockReturnValue({ + tweet_id: 'fake-1', + } as any); + jest.spyOn(mock_query_runner.manager, 'save').mockResolvedValue({ tweet_id: 'fake-1' }); + jest.spyOn(tweets_service as any, 'insertTweetHashtags').mockResolvedValue(undefined); + + const result = await tweets_service.createFakeTrendTweet( + content, + user_id, + hashtag_topics + ); + + expect(result).toBeDefined(); + expect(mock_query_runner.commitTransaction).toHaveBeenCalled(); + }); + }); + + describe('deleteTweetsByUserId', () => { + it('should delete all tweets by user', async () => { + const user_id = 'user-123'; + + const mock_tweets = [ + { tweet_id: 'tweet-1', type: TweetType.TWEET }, + { tweet_id: 'tweet-2', type: TweetType.REPLY }, + ]; + + (tweet_repo.find as jest.Mock).mockResolvedValue(mock_tweets); + jest.spyOn(mock_query_runner.manager, 'delete').mockResolvedValue({ + affected: 2, + } as any); + jest.spyOn(tweets_service as any, 'queueRepostAndQuoteDeleteJobs').mockResolvedValue( + undefined + ); + + await tweets_service.deleteTweetsByUserId(user_id); + + expect(tweet_repo.find).toHaveBeenCalledWith({ + where: { user_id }, + select: ['tweet_id', 'user_id', 'type'], + }); + expect(mock_query_runner.commitTransaction).toHaveBeenCalled(); + }); + + it('should return early when no tweets found', async () => { + const user_id = 'user-123'; + const console_log_spy = jest.spyOn(console, 'log').mockImplementation(); + + (tweet_repo.find as jest.Mock).mockResolvedValue([]); + + await tweets_service.deleteTweetsByUserId(user_id); + + expect(console_log_spy).toHaveBeenCalledWith(`No tweets found for user ${user_id}`); + expect(mock_query_runner.commitTransaction).not.toHaveBeenCalled(); + + console_log_spy.mockRestore(); + }); + + it('should handle errors during deletion', async () => { + const user_id = 'user-123'; + const console_error_spy = jest.spyOn(console, 'error').mockImplementation(); + + (tweet_repo.find as jest.Mock).mockRejectedValue(new Error('DB Error')); + + await expect(tweets_service.deleteTweetsByUserId(user_id)).rejects.toThrow('DB Error'); + expect(mock_query_runner.rollbackTransaction).toHaveBeenCalled(); + + console_error_spy.mockRestore(); + }); + }); + + describe('getReplyWithUserById', () => { + it('should return reply with parent chain', async () => { + const tweet_id = 'reply-123'; + const current_user_id = 'user-123'; + + const mock_reply_chain = [ + { tweet_id: 'reply-123', type: TweetType.REPLY }, + { tweet_id: 'parent-123', type: TweetType.TWEET }, + ]; + + jest.spyOn(tweets_repo, 'getReplyWithParentChain').mockResolvedValue( + mock_reply_chain as any + ); + jest.spyOn(tweets_service as any, 'incrementTweetViewsAsync').mockResolvedValue( + undefined + ); + + const result = await (tweets_service as any).getReplyWithUserById( + tweet_id, + current_user_id + ); + + expect(result).toBeDefined(); + expect(tweets_repo.getReplyWithParentChain).toHaveBeenCalledWith( + tweet_id, + current_user_id + ); + }); + + it('should throw NotFoundException when reply chain is empty', async () => { + const tweet_id = 'nonexistent'; + + jest.spyOn(tweets_repo, 'getReplyWithParentChain').mockResolvedValue([]); + + await expect((tweets_service as any).getReplyWithUserById(tweet_id)).rejects.toThrow( + 'Tweet not found' + ); + }); + }); }); diff --git a/src/tweets/tweets.service.ts b/src/tweets/tweets.service.ts index 6e653bb8..c090c956 100644 --- a/src/tweets/tweets.service.ts +++ b/src/tweets/tweets.service.ts @@ -1,4 +1,3 @@ -/* eslint-disable */ import { BadRequestException, ForbiddenException, @@ -6,14 +5,7 @@ import { NotFoundException, } from '@nestjs/common'; import { InjectRepository } from '@nestjs/typeorm'; -import { - DataSource, - In, - QueryRunner, - Repository, - SelectQueryBuilder, - ObjectLiteral, -} from 'typeorm'; +import { DataSource, In, QueryRunner, Repository } from 'typeorm'; import { UploadMediaResponseDTO } from './dto/upload-media.dto'; import { CreateTweetDTO, @@ -21,8 +13,6 @@ import { PaginatedTweetRepostsResponseDTO, UpdateTweetDTO, } from './dto'; -import { promises as fs } from 'fs'; -import * as path from 'path'; import { TweetResponseDTO } from './dto/tweet-response.dto'; import { PostgresErrorCodes } from '../shared/enums/postgres-error-codes'; import { Tweet } from './entities/tweet.entity'; @@ -31,23 +21,20 @@ import { TweetRepost } from './entities/tweet-repost.entity'; import { TweetQuote } from './entities/tweet-quote.entity'; import { TweetReply } from './entities/tweet-reply.entity'; import { TweetBookmark } from './entities/tweet-bookmark.entity'; +import { TweetHashtag } from './entities/tweet-hashtag.entity'; import { Hashtag } from './entities/hashtags.entity'; import { UserFollows } from '../user/entities/user-follows.entity'; import { User } from '../user/entities/user.entity'; import { PaginationService } from 'src/shared/services/pagination/pagination.service'; import { BlobServiceClient } from '@azure/storage-blob'; import { TweetsRepository } from './tweets.repository'; -import { TimelinePaginationDto } from 'src/timeline/dto/timeline-pagination.dto'; import { GetTweetRepliesQueryDto } from './dto'; import { plainToInstance } from 'class-transformer'; import { TweetQuoteResponseDTO } from './dto/tweet-quote-reponse'; -import { AzureStorageService } from 'src/azure-storage/azure-storage.service'; import { TweetReplyResponseDTO } from './dto/tweet-reply-response'; import { TweetType } from 'src/shared/enums/tweet-types.enum'; -import { UserPostsView } from './entities/user-posts-view.entity'; -import e from 'express'; import { tweet_fields_slect } from './queries/tweet-fields-select.query'; -import { categorize_prompt, summarize_prompt, TOPICS } from './constants'; +import { categorize_prompt, TOPICS } from './constants'; import { CompressVideoJobService } from 'src/background-jobs/videos/compress-video.service'; import { ReplyJobService } from 'src/background-jobs/notifications/reply/reply.service'; import { LikeJobService } from 'src/background-jobs/notifications/like/like.service'; @@ -66,9 +53,9 @@ import { TweetSummary } from './entities/tweet-summary.entity'; import { TweetSummaryResponseDTO } from './dto/tweet-summary-response.dto'; ffmpeg.setFfmpegPath(ffmpegInstaller.path); -import { TrendService } from 'src/trend/trend.service'; import { HashtagJobService } from 'src/background-jobs/hashtag/hashtag.service'; +import { extractHashtags } from 'twitter-text'; @Injectable() export class TweetsService { constructor( @@ -84,16 +71,13 @@ export class TweetsService { private readonly tweet_reply_repository: Repository, @InjectRepository(TweetBookmark) private readonly tweet_bookmark_repository: Repository, - @InjectRepository(UserFollows) - private readonly user_follows_repository: Repository, - @InjectRepository(UserPostsView) - private readonly user_posts_view_repository: Repository, + @InjectRepository(User) + private readonly user_repository: Repository, @InjectRepository(TweetSummary) private readonly tweet_summary_repository: Repository, - private data_source: DataSource, + private readonly data_source: DataSource, private readonly paginate_service: PaginationService, private readonly tweets_repository: TweetsRepository, - private readonly azure_storage_service: AzureStorageService, private readonly reply_job_service: ReplyJobService, private readonly like_job_service: LikeJobService, private readonly hashtag_job_service: HashtagJobService, @@ -113,6 +97,20 @@ export class TweetsService { apiKey: process.env.GROQ_API_KEY ?? '', }); + private async incrementTweetViewsAsync(tweet_ids: string[]): Promise { + if (!tweet_ids.length) return; + + try { + // Call PostgreSQL function to increment views in batch + await this.data_source.query('SELECT increment_tweet_views_batch($1::uuid[])', [ + tweet_ids, + ]); + } catch (error) { + // Log error but don't fail the request + console.error('Failed to increment tweet views:', error); + } + } + /** * Handles image upload processing * @param file - The uploaded image file (in memory, not saved to disk) @@ -193,13 +191,13 @@ export class TweetsService { */ private convertToCompressedMp4(video_buffer: Buffer): Promise { return new Promise((resolve, reject) => { - const inputStream = new Readable(); - inputStream.push(video_buffer); - inputStream.push(null); + const input_stream = new Readable(); + input_stream.push(video_buffer); + input_stream.push(null); - const outputChunks: Buffer[] = []; + const output_chunks: Buffer[] = []; - ffmpeg(inputStream) + ffmpeg(input_stream) .outputOptions([ '-vcodec libx264', '-crf 28', @@ -214,11 +212,11 @@ export class TweetsService { }) .on('end', () => { console.log('FFmpeg conversion completed'); - resolve(Buffer.concat(outputChunks)); + resolve(Buffer.concat(output_chunks)); }) .pipe() .on('data', (chunk: Buffer) => { - outputChunks.push(chunk); + output_chunks.push(chunk); }) .on('error', (error) => { console.error('Stream error:', error); @@ -299,24 +297,35 @@ export class TweetsService { await query_runner.startTransaction(); try { - const mentions = await this.extractDataFromTweets(tweet, user_id, query_runner); + const { mentioned_user_ids, mentioned_usernames } = await this.extractDataFromTweets( + tweet, + user_id, + query_runner + ); + // watch the error which could exist if user id not found here const new_tweet = query_runner.manager.create(Tweet, { user_id, type: TweetType.TWEET, + mentions: mentioned_usernames, ...tweet, }); const saved_tweet = await query_runner.manager.save(Tweet, new_tweet); + + // Extract normalized hashtags to insert into tweet_hashtags table + const { content } = tweet; + await this.insertTweetHashtags(content, saved_tweet.tweet_id, user_id, query_runner); + await query_runner.commitTransaction(); await this.es_index_tweet_service.queueIndexTweet({ tweet_id: saved_tweet.tweet_id, }); + console.log(mentioned_user_ids); + // Send mention notifications after tweet is saved - if (mentions.length > 0) { - await this.mentionNotification(mentions, user_id, saved_tweet); - } + await this.mentionNotification(mentioned_user_ids, user_id, saved_tweet, 'add'); return plainToInstance(TweetResponseDTO, saved_tweet, { excludeExtraneousValues: true, @@ -344,19 +353,28 @@ export class TweetsService { await query_runner.startTransaction(); try { - const mentions = await this.extractDataFromTweets(tweet, user_id, query_runner); - const tweet_to_update = await query_runner.manager.findOne(Tweet, { where: { tweet_id }, }); if (!tweet_to_update) throw new NotFoundException('Tweet not found'); - query_runner.manager.merge(Tweet, tweet_to_update, { ...tweet }); + const { mentioned_user_ids, mentioned_usernames } = await this.extractDataFromTweets( + tweet, + user_id, + query_runner + ); + + query_runner.manager.merge(Tweet, tweet_to_update, { + ...tweet, + mentions: mentioned_usernames, + }); if (tweet_to_update.user_id !== user_id) throw new BadRequestException('User is not allowed to update this tweet'); + await query_runner.manager.delete(TweetSummary, { tweet_id }); + const updated_tweet = await query_runner.manager.save(Tweet, tweet_to_update); await query_runner.commitTransaction(); @@ -365,11 +383,8 @@ export class TweetsService { }); // Send mention notifications for updated tweet - if (mentions.length > 0) { - await this.mentionNotification(mentions, user_id, updated_tweet); - } + await this.mentionNotification(mentioned_user_ids, user_id, updated_tweet, 'add'); - // return TweetMapper.toDTO(tweet_with_type_info); return plainToInstance(TweetResponseDTO, updated_tweet, { excludeExtraneousValues: true, }); @@ -385,8 +400,12 @@ export class TweetsService { // hard delete tweet async deleteTweet(tweet_id: string, user_id: string): Promise { + const query_runner = this.data_source.createQueryRunner(); + await query_runner.connect(); + await query_runner.startTransaction(); + try { - const tweet = await this.tweet_repository.findOne({ + const tweet = await query_runner.manager.findOne(Tweet, { where: { tweet_id }, select: ['tweet_id', 'user_id', 'type'], }); @@ -397,69 +416,72 @@ export class TweetsService { throw new BadRequestException('User is not allowed to delete this tweet'); } - await this.queueRepostAndQuoteDeleteJobs(tweet, tweet.type, user_id); + // If it's a reply, decrement reply count for all parent tweets + await this.queueRepostAndQuoteDeleteJobs(tweet, tweet.type, user_id, query_runner); - await this.tweet_repository.delete({ tweet_id }); + await query_runner.manager.delete(Tweet, { tweet_id }); + await query_runner.commitTransaction(); await this.es_delete_tweet_service.queueDeleteTweet({ - tweet_id, + tweet_ids: [tweet_id], }); } catch (error) { console.error(error); + if (query_runner.isTransactionActive) { + await query_runner.rollbackTransaction(); + } throw error; + } finally { + await query_runner.release(); } } async getTweetSummary(tweet_id: string): Promise { - try { - const tweet = await this.tweet_repository.findOne({ - where: { tweet_id }, - select: ['content', 'tweet_id'], - }); - if (!tweet) throw new NotFoundException('Tweet not found'); + const tweet = await this.tweet_repository.findOne({ + where: { tweet_id }, + select: ['content', 'tweet_id'], + }); + if (!tweet) throw new NotFoundException('Tweet not found'); - const cleanedContent = tweet.content - .replace(/#[a-zA-Z0-9_]+/g, '') - .replace(/\s+/g, ' ') - .trim(); + const cleaned_content = tweet.content + .replaceAll(/#[a-zA-Z0-9_]+/g, '') + .replaceAll(/\s+/g, ' ') + .trim(); - if (cleanedContent.length < 120) { - throw new BadRequestException('Tweet content too short for summary generation.'); - } + if (cleaned_content.length < 120) { + throw new BadRequestException('Tweet content too short for summary generation.'); + } - let tweet_summary = await this.tweet_summary_repository.findOne({ - where: { tweet_id }, + let tweet_summary = await this.tweet_summary_repository.findOne({ + where: { tweet_id }, + }); + + if (!tweet_summary) { + // Queue the summary generation job + await this.ai_summary_job_service.queueGenerateSummary({ + tweet_id, + content: tweet.content, }); - if (!tweet_summary) { - // Queue the summary generation job - await this.ai_summary_job_service.queueGenerateSummary({ - tweet_id, - content: tweet.content, + // Wait for the job to complete (with polling) + for (let i = 0; i < 15; i++) { + await new Promise((resolve) => setTimeout(resolve, 250)); + tweet_summary = await this.tweet_summary_repository.findOne({ + where: { tweet_id }, }); - - // Wait for the job to complete (with polling) - for (let i = 0; i < 15; i++) { - await new Promise((resolve) => setTimeout(resolve, 250)); - tweet_summary = await this.tweet_summary_repository.findOne({ - where: { tweet_id }, - }); - if (tweet_summary) { - return { - tweet_id, - summary: tweet_summary.summary, - }; - } + if (tweet_summary) { + return { + tweet_id, + summary: tweet_summary.summary, + }; } - throw new NotFoundException('Failed to generate summary after retry.'); } - return { - tweet_id, - summary: tweet_summary.summary, - }; - } catch (error) { - throw error; + throw new NotFoundException('Failed to generate summary after retry.'); } + return { + tweet_id, + summary: tweet_summary.summary, + }; } async getTweetById(tweet_id: string, current_user_id?: string): Promise { @@ -501,17 +523,14 @@ export class TweetsService { await query_runner.manager.increment(Tweet, { tweet_id }, 'num_likes', 1); await query_runner.commitTransaction(); - if (tweet.user_id !== user_id) + if (tweet.user_id !== user_id) { this.like_job_service.queueLikeNotification({ tweet, like_to: tweet.user_id, liked_by: user_id, action: 'add', }); - - await this.es_index_tweet_service.queueIndexTweet({ - tweet_id, - }); + } await this.es_index_tweet_service.queueIndexTweet({ tweet_id, @@ -630,13 +649,18 @@ export class TweetsService { await query_runner.startTransaction(); try { - const parentTweet = await this.getTweetWithUserById(tweet_id, user_id, false); + const parent_tweet = await this.getTweetWithUserById(tweet_id, user_id, false); - const mentions = await this.extractDataFromTweets(quote, user_id, query_runner); + const { mentioned_user_ids, mentioned_usernames } = await this.extractDataFromTweets( + quote, + user_id, + query_runner + ); const new_quote_tweet = query_runner.manager.create(Tweet, { ...quote, user_id, + mentions: mentioned_usernames, type: TweetType.QUOTE, }); const saved_quote_tweet = await query_runner.manager.save(Tweet, new_quote_tweet); @@ -649,49 +673,54 @@ export class TweetsService { await query_runner.manager.save(TweetQuote, tweet_quote); await query_runner.manager.increment(Tweet, { tweet_id }, 'num_quotes', 1); - await query_runner.commitTransaction(); - await this.es_index_tweet_service.queueIndexTweet({ - tweet_id: saved_quote_tweet.tweet_id, - parent_id: saved_quote_tweet.tweet_id, - }); + const { content } = quote; + await this.insertTweetHashtags( + content, + saved_quote_tweet.tweet_id, + user_id, + query_runner + ); + await query_runner.manager.increment(Tweet, { tweet_id }, 'num_reposts', 1); + await query_runner.commitTransaction(); await this.es_index_tweet_service.queueIndexTweet({ tweet_id: saved_quote_tweet.tweet_id, - parent_id: saved_quote_tweet.tweet_id, + parent_id: tweet_id, }); const response = plainToInstance(TweetQuoteResponseDTO, { ...saved_quote_tweet, - quoted_tweet: plainToInstance(TweetResponseDTO, parentTweet, { + quoted_tweet: plainToInstance(TweetResponseDTO, parent_tweet, { excludeExtraneousValues: true, }), }); - console.log('parentTweet', parentTweet); - - if (parentTweet.user?.id && user_id !== parentTweet.user.id) + if (parent_tweet.user?.id && user_id !== parent_tweet.user.id) this.quote_job_service.queueQuoteNotification({ - quote_to: parentTweet.user.id, + quote_to: parent_tweet.user.id, quoted_by: user_id, quote_tweet: saved_quote_tweet, - parent_tweet: parentTweet, + parent_tweet: parent_tweet, action: 'add', }); + const mentioned_user_ids_without_original_author = mentioned_user_ids.filter( + (mentioned_user_id) => mentioned_user_id !== parent_tweet.user.id + ); + // Send mention notifications for quote tweet - if (mentions.length > 0) { - await this.mentionNotification( - mentions, - user_id, - saved_quote_tweet, - plainToInstance(TweetResponseDTO, parentTweet, { - excludeExtraneousValues: true, - }) - ); - } + await this.mentionNotification( + mentioned_user_ids_without_original_author, + user_id, + saved_quote_tweet, + 'add', + plainToInstance(TweetResponseDTO, parent_tweet, { + excludeExtraneousValues: true, + }) + ); - // I guess this should vbe returned, it was not returned before + // I guess this should be returned, it was not returned before return response; } catch (error) { await query_runner.rollbackTransaction(); @@ -716,6 +745,7 @@ export class TweetsService { await query_runner.manager.insert(TweetRepost, new_repost); await query_runner.manager.increment(Tweet, { tweet_id }, 'num_reposts', 1); await query_runner.commitTransaction(); + // await this.data_source.query('REFRESH MATERIALIZED VIEW user_posts_view'); if (tweet.user_id !== user_id) this.repost_job_service.queueRepostNotification({ @@ -799,7 +829,6 @@ export class TweetsService { const [original_tweet, original_reply] = await Promise.all([ query_runner.manager.findOne(Tweet, { where: { tweet_id: original_tweet_id }, - select: ['tweet_id', 'user_id'], }), query_runner.manager.findOne(TweetReply, { where: { reply_tweet_id: original_tweet_id }, @@ -808,12 +837,17 @@ export class TweetsService { if (!original_tweet) throw new NotFoundException('Original tweet not found'); - const mentions = await this.extractDataFromTweets(reply_dto, user_id, query_runner); + const { mentioned_user_ids, mentioned_usernames } = await this.extractDataFromTweets( + reply_dto, + user_id, + query_runner + ); // Create the reply tweet const new_reply_tweet = query_runner.manager.create(Tweet, { ...reply_dto, user_id, + mentions: mentioned_usernames, type: TweetType.REPLY, }); const saved_reply_tweet = await query_runner.manager.save(Tweet, new_reply_tweet); @@ -827,30 +861,43 @@ export class TweetsService { }); await query_runner.manager.save(TweetReply, tweet_reply); - // Increment reply count on original tweet await query_runner.manager.increment( Tweet, { tweet_id: original_tweet_id }, 'num_replies', 1 ); + // Extract normalized hashtags to insert into tweet_hashtags table + const { content } = reply_dto; + await this.insertTweetHashtags( + content, + saved_reply_tweet.tweet_id, + user_id, + query_runner + ); await query_runner.commitTransaction(); if (user_id !== original_tweet.user_id) this.reply_job_service.queueReplyNotification({ reply_tweet: saved_reply_tweet, - original_tweet_id: original_tweet_id, + original_tweet: original_tweet, replied_by: user_id, reply_to: original_tweet.user_id, conversation_id: original_reply?.conversation_id || original_tweet_id, action: 'add', }); - // Send mention notifications for reply - if (mentions.length > 0) { - await this.mentionNotification(mentions, user_id, saved_reply_tweet); - } + const mentioned_user_ids_without_original_author = mentioned_user_ids.filter( + (mentioned_user_id) => mentioned_user_id !== original_tweet.user_id + ); + + await this.mentionNotification( + mentioned_user_ids_without_original_author, + user_id, + saved_reply_tweet, + 'add' + ); const returned_reply = plainToInstance( TweetReplyResponseDTO, @@ -878,6 +925,84 @@ export class TweetsService { } } + async getTweetReplies( + tweet_id: string, + current_user_id: string, + query_dto: GetTweetRepliesQueryDto + ): Promise<{ + data: TweetResponseDTO[]; + count: number; + next_cursor: string | null; + has_more: boolean; + }> { + // Verify the tweet exists + const original_tweet = await this.tweet_repository.findOne({ + where: { tweet_id }, + select: ['tweet_id'], + }); + + if (!original_tweet) { + throw new NotFoundException('Tweet not found'); + } + + const limit = query_dto.limit ?? 20; + + // Build query to get only direct replies (one level) + let query = this.tweet_repository + .createQueryBuilder('tweet') + .leftJoinAndSelect('tweet.user', 'user') + .innerJoin('tweet_replies', 'reply', 'reply.reply_tweet_id = tweet.tweet_id') + .where('reply.original_tweet_id = :tweet_id', { tweet_id }) + .select(tweet_fields_slect) + .orderBy('tweet.created_at', 'DESC') + .take(limit + 1); + + // Attach user interaction flags (is_liked, is_reposted, is_bookmarked, is_following_author) + query = this.tweets_repository.attachUserTweetInteractionFlags( + query, + current_user_id, + 'tweet' + ); + + this.paginate_service.applyCursorPagination( + query, + query_dto.cursor, + 'tweet', + 'created_at', + 'tweet_id' + ); + + const tweets = await query.getMany(); + + // Increment views for reply tweets + const tweet_ids = tweets.map((t) => t.tweet_id).filter(Boolean); + this.incrementTweetViewsAsync(tweet_ids).catch(() => {}); + + const tweets_dto = plainToInstance(TweetResponseDTO, tweets, { + excludeExtraneousValues: true, + }); + + let has_more = false; + let next_cursor: string | null = null; + if (tweets_dto.length > limit) { + tweets_dto.pop(); + has_more = true; + } else { + next_cursor = this.paginate_service.generateNextCursor( + tweets_dto, + 'created_at', + 'tweet_id' + ); + } + + return { + data: tweets_dto, + count: tweets_dto.length, + next_cursor, + has_more: has_more, + }; + } + async incrementTweetViews(tweet_id: string): Promise<{ success: boolean }> { try { const tweet = await this.tweet_repository.findOne({ @@ -1097,6 +1222,10 @@ export class TweetsService { const quotes = await query.getMany(); + // Increment views for quote tweets + const tweet_ids = quotes.map((q) => q.quote_tweet?.tweet_id).filter(Boolean); + this.incrementTweetViewsAsync(tweet_ids).catch(() => {}); + // Map to DTOs const quote_dtos = quotes.map((quote) => { const quote_temp = plainToInstance(TweetQuoteResponseDTO, quote.quote_tweet, { @@ -1127,57 +1256,82 @@ export class TweetsService { private async queueRepostAndQuoteDeleteJobs( tweet: Tweet, type: TweetType, - user_id: string + user_id: string, + query_runner: QueryRunner ): Promise { try { if (type === TweetType.REPLY) { - const tweet_reply = await this.tweet_reply_repository.findOne({ + const reply_info = await query_runner.manager.findOne(TweetReply, { where: { reply_tweet_id: tweet.tweet_id }, + select: ['original_tweet_id'], }); - if (tweet_reply?.original_tweet_id) { - const original_tweet = await this.tweet_repository.findOne({ - where: { tweet_id: tweet_reply.original_tweet_id }, + if (reply_info?.original_tweet_id) { + // Decrement reply count only on the direct parent tweet + await query_runner.manager.decrement( + Tweet, + { tweet_id: reply_info.original_tweet_id }, + 'num_replies', + 1 + ); + + const original_tweet = await query_runner.manager.findOne(Tweet, { + where: { tweet_id: reply_info.original_tweet_id }, select: ['user_id'], }); const parent_owner_id = original_tweet?.user_id || null; - if (!parent_owner_id) return; - - this.reply_job_service.queueReplyNotification({ - reply_tweet_id: tweet.tweet_id, - reply_to: parent_owner_id || user_id, - replied_by: user_id, - action: 'remove', - }); + if (parent_owner_id) { + this.reply_job_service.queueReplyNotification({ + reply_tweet_id: tweet.tweet_id, + reply_to: parent_owner_id, + replied_by: user_id, + action: 'remove', + }); + } } } else if (type === TweetType.QUOTE) { - const tweet_quote = await this.tweet_quote_repository.findOne({ + const tweet_quote = await query_runner.manager.findOne(TweetQuote, { where: { quote_tweet_id: tweet.tweet_id }, }); if (tweet_quote?.original_tweet_id) { - const original_tweet = await this.tweet_repository.findOne({ + // Decrement quote count on direct parent only + await query_runner.manager.decrement( + Tweet, + { tweet_id: tweet_quote.original_tweet_id }, + 'num_quotes', + 1 + ); + + await query_runner.manager.decrement( + Tweet, + { tweet_id: tweet_quote.original_tweet_id }, + 'num_reposts', + 1 + ); + + const original_tweet = await query_runner.manager.findOne(Tweet, { where: { tweet_id: tweet_quote.original_tweet_id }, select: ['user_id'], }); const parent_owner_id = original_tweet?.user_id || null; - if (!parent_owner_id) return; - - this.quote_job_service.queueQuoteNotification({ - quote_tweet_id: tweet.tweet_id, - quote_to: parent_owner_id, - quoted_by: user_id, - action: 'remove', - }); + if (parent_owner_id) { + this.quote_job_service.queueQuoteNotification({ + quote_tweet_id: tweet.tweet_id, + quote_to: parent_owner_id, + quoted_by: user_id, + action: 'remove', + }); + } } } // Handle mention notifications removal for any tweet type await this.queueMentionDeleteJobs(tweet, user_id); } catch (error) { - console.error('Error fetching parent tweet owner:', error); + console.error('Error in queueRepostAndQuoteDeleteJobs:', error); } } @@ -1195,17 +1349,7 @@ export class TweetsService { const mentions = full_tweet.content.match(/@([a-zA-Z0-9_]+)/g) || []; if (mentions.length === 0) return; - // Remove @ symbol and make unique - const clean_usernames = [...new Set(mentions.map((u) => u.replace('@', '')))]; - - // Queue mention removal notification (background job will fetch user IDs) - await this.mention_job_service.queueMentionNotification({ - tweet_id: tweet.tweet_id, - mentioned_by: user_id, - mentioned_usernames: clean_usernames, - tweet_type: 'tweet', - action: 'remove', - }); + await this.mentionNotification(mentions, user_id, tweet, 'remove'); } catch (error) { console.error('Error queueing mention removal notifications:', error); } @@ -1214,9 +1358,7 @@ export class TweetsService { private async getTweetWithUserById( tweet_id: string, current_user_id?: string, - flag: boolean = true, - include_replies: boolean = true, - replies_limit: number = 3 + flag: boolean = true ): Promise { try { let query = this.tweet_repository @@ -1235,6 +1377,9 @@ export class TweetsService { const tweet = await query.getOne(); if (!tweet) throw new NotFoundException('Tweet not found'); + // Increment view count asynchronously + this.incrementTweetViewsAsync([tweet_id]).catch(() => {}); + // Transform current tweet to DTO const tweet_dto = plainToInstance(TweetResponseDTO, tweet, { excludeExtraneousValues: true, @@ -1253,16 +1398,6 @@ export class TweetsService { } } - // Fetch limited replies if requested and tweet has replies - if (include_replies && tweet.num_replies > 0) { - const replies_result = await this.tweets_repository.getReplies( - tweet_id, - current_user_id, - { limit: replies_limit } - ); - tweet_dto.replies = replies_result.tweets; - } - return tweet_dto; } catch (error) { console.error(error); @@ -1284,6 +1419,10 @@ export class TweetsService { throw new NotFoundException('Tweet not found'); } + // Increment views for all tweets in the reply chain + const tweet_ids = reply_chain.map((t) => t.tweet_id).filter(Boolean); + this.incrementTweetViewsAsync(tweet_ids).catch(() => {}); + // Build nested structure from deepest parent to starting tweet let parent_tweet_dto: TweetResponseDTO | null = null; @@ -1325,38 +1464,73 @@ export class TweetsService { private async extractDataFromTweets( tweet: CreateTweetDTO | UpdateTweetDTO, user_id: string, - query_runner: QueryRunner - ): Promise { + query_runner: QueryRunner, + skip_extract_topics: boolean = false, + predefined_hashtag_topics?: Record> + ): Promise<{ mentioned_user_ids: string[]; mentioned_usernames: string[] }> { + if (!tweet?.content) return { mentioned_user_ids: [], mentioned_usernames: [] }; const { content } = tweet; - if (!content) return []; + console.log('content:', content); // Extract mentions and return them for later processing - const mentions = content.match(/@([a-zA-Z0-9_]+)/g) || []; + const mentions = + content.match(/@([a-zA-Z0-9_]+)/g)?.map((mention) => mention.slice(1)) || []; // Extract hashtags and remove duplicates - // Extract hashtags and remove duplicates - const hashtags = - content.match(/#([a-zA-Z0-9_]+)/g)?.map((hashtag) => hashtag.slice(1)) || []; + const hashtags = content.match(/#([\p{L}\p{N}_]+)/gu)?.map((h) => h.slice(1)) || []; const unique_hashtags = [...new Set(hashtags)]; - await this.updateHashtags(unique_hashtags, user_id, query_runner); + const normalized_hashtags = hashtags.map((hashtag) => { + return hashtag.toLowerCase(); + }); + + // Extract topics using Groq AI or use predefined topics + if (!skip_extract_topics) { + const topics = await this.extractTopics(content, unique_hashtags); + console.log('Extracted topics:', topics); - // Extract topics using Groq AI - const topics = await this.extractTopics(content, unique_hashtags); - console.log('Extracted topics:', topics); + //Insert Hashtag with Topics in redis + await this.hashtag_job_service.queueHashtag({ + hashtags: topics.hashtags, + timestamp: Date.now(), + }); + } else if (predefined_hashtag_topics) { + // For fake trends: use predefined topics + console.log('Using predefined hashtag topics for fake trend'); - //Insert Hashtag with Topics in redis + await this.hashtag_job_service.queueHashtag({ + hashtags: predefined_hashtag_topics, + timestamp: Date.now(), + }); + } - await this.hashtag_job_service.queueHashtag({ - hashtags: topics.hashtags, - timestamp: Date.now(), + const mentioned_users = await this.user_repository.find({ + where: { username: In(mentions) }, + select: ['username', 'id'], }); - // You can store topics in the tweet entity or use them for recommendations - // For example, you could add a 'topics' field to your Tweet entity - // tweet.topics = topics; + const mapped_users = new Map(); + + for (const mention of mentions) { + const found = mentioned_users.find((u) => u.username === mention); + if (found) mapped_users.set(mention, found.id); + } - return mentions; + const mentioned_user_ids: string[] = []; + const mentioned_usernames: string[] = []; + + mentions.forEach((mention, index) => { + const id = mapped_users.get(mention); + + if (id) { + tweet.content = tweet.content?.replace(`@${mention}`, `\u200B$(${index})\u200C`); + + mentioned_usernames.push(mention); + mentioned_user_ids.push(id); + } + }); + + return { mentioned_user_ids, mentioned_usernames }; } async extractTopics( @@ -1378,11 +1552,11 @@ export class TweetsService { return { tweet: empty, hashtags: result }; } - + console.log('HASHTAGS: ', hashtags); // remove hashtags and extra spaces content = content - .replace(/#[a-zA-Z0-9_]+/g, '') - .replace(/\s+/g, ' ') + .replaceAll(/#[^\s]+/g, '') // remove anything starting with + .replaceAll(/\s+/g, ' ') .trim(); const prompt = categorize_prompt(content, hashtags); @@ -1393,8 +1567,8 @@ export class TweetsService { temperature: 0, }); - const rawText = response.choices?.[0]?.message?.content?.trim() ?? ''; - if (!rawText) { + const raw_text = response.choices?.[0]?.message?.content?.trim() ?? ''; + if (!raw_text) { console.warn('Groq returned empty response'); const empty: Record = {}; TOPICS.forEach((t) => (empty[t] = 0)); @@ -1406,11 +1580,11 @@ export class TweetsService { return { tweet: empty, hashtags: result }; } - let jsonText = rawText; - const m = rawText.match(/\{[\s\S]*\}/); - if (m) jsonText = m[0]; + let json_text = raw_text; + const m = raw_text.match(/\{[\s\S]*\}/); + if (m) json_text = m[0]; - let parsed = JSON.parse(jsonText); + const parsed = JSON.parse(json_text); const text_total = Object.values(parsed.text).reduce( (a, b) => a + Number(b), @@ -1459,26 +1633,26 @@ export class TweetsService { } private async mentionNotification( - usernames: string[], + mentioned_user_ids: string[], user_id: string, tweet: Tweet, + action: 'add' | 'remove', parent_tweet?: TweetResponseDTO ): Promise { - if (usernames.length === 0) return; + if (mentioned_user_ids.length === 0) return; - try { - // Remove @ symbol from usernames and make them unique - const clean_usernames = [...new Set(usernames.map((u) => u.replace('@', '')))]; + const unique_mentioned_user_ids = Array.from(new Set(mentioned_user_ids)); + try { // Queue mention notification with usernames (background job will fetch user IDs) await this.mention_job_service.queueMentionNotification({ tweet, tweet_id: tweet.tweet_id, parent_tweet, mentioned_by: user_id, - mentioned_usernames: clean_usernames, + mentioned_user_ids: unique_mentioned_user_ids, tweet_type: tweet.type, - action: 'add', + action, }); } catch (error) { console.error('Error queueing mention notifications:', error); @@ -1488,54 +1662,30 @@ export class TweetsService { private async updateHashtags( names: string[], user_id: string, - query_runner: QueryRunner + query_runner: QueryRunner, + tweet_id?: string ): Promise { if (names.length === 0) return; - const hashtags = names.map((name) => ({ name, created_by: { id: user_id } }) as Hashtag); + const hashtags = names.map((name) => ({ name }) as Hashtag); await query_runner.manager.upsert(Hashtag, hashtags, { conflictPaths: ['name'], upsertType: 'on-conflict-do-update', }); await query_runner.manager.increment(Hashtag, { name: In(names) }, 'usage_count', 1); - } - async getTweetReplies( - tweet_id: string, - current_user_id: string, - query_dto: GetTweetRepliesQueryDto - ): Promise<{ - data: TweetResponseDTO[]; - count: number; - next_cursor: string | null; - has_more: boolean; - }> { - // First, check if the tweet exists - const tweet = await this.tweet_repository.findOne({ - where: { tweet_id }, - }); + // Insert hashtag-tweet associations if tweet_id is provided + if (tweet_id) { + const tweet_hashtags = names.map((hashtag_name) => ({ + tweet_id, + hashtag_name, + })); - if (!tweet) { - throw new NotFoundException('Tweet not found'); + await query_runner.manager.insert(TweetHashtag, tweet_hashtags); + console.log( + `Inserted ${tweet_hashtags.length} hashtag associations for tweet ${tweet_id}` + ); } - - const pagination: TimelinePaginationDto = { - limit: query_dto.limit ?? 20, - cursor: query_dto.cursor, - }; - - const { tweets, next_cursor } = await this.tweets_repository.getReplies( - tweet_id, - current_user_id, - pagination - ); - - return { - data: tweets, - count: tweets.length, - next_cursor, - has_more: next_cursor !== null, - }; } async getUserBookmarks( @@ -1550,7 +1700,7 @@ export class TweetsService { has_more: boolean; }; }> { - let query = this.tweet_bookmark_repository + const query = this.tweet_bookmark_repository .createQueryBuilder('bookmark') .leftJoinAndSelect('bookmark.tweet', 'tweet') .leftJoinAndSelect('tweet.user', 'user') @@ -1585,9 +1735,7 @@ export class TweetsService { return await this.getTweetWithUserById( bookmark.tweet.tweet_id, user_id, - true, // flag to include parent tweets - false, // don't include replies - 0 // replies_limit + true // flag to include parent tweets ); }) ); @@ -1608,4 +1756,163 @@ export class TweetsService { }, }; } + /////////////////////////////////////////////////////////// Fake Trend Tweets Methods ///////////////////////////////////////////////// + + /** + * Builds default hashtag topics structure for fake trend tweets + * Maps hashtags to a specified category with 100% weight + */ + buildDefaultHashtagTopics( + hashtags: string[], + topic: 'Sports' | 'Entertainment' | 'News' + ): Record> { + const topics_distribution: Record< + 'Sports' | 'Entertainment' | 'News', + Record + > = { + Sports: { Sports: 100, Entertainment: 0, News: 0 }, + Entertainment: { Sports: 0, Entertainment: 100, News: 0 }, + News: { Sports: 0, Entertainment: 0, News: 100 }, + }; + + const result: Record> = {}; + hashtags.forEach((hashtag) => { + // Remove # symbol if present + const clean_hashtag = hashtag.startsWith('#') ? hashtag.slice(1) : hashtag; + result[clean_hashtag] = topics_distribution[topic]; + }); + + return result; + } + + /** + * Creates a fake trend tweet with predefined hashtag topics + * Skips Groq AI extraction for performance + */ + async createFakeTrendTweet( + content: string, + user_id: string, + hashtag_topics: Record> + ): Promise { + const query_runner = this.data_source.createQueryRunner(); + await query_runner.connect(); + await query_runner.startTransaction(); + + try { + const mentions = await this.extractDataFromTweets( + { content }, + user_id, + query_runner, + true, // skip_extract_topics flag + hashtag_topics + ); + + const new_tweet = query_runner.manager.create(Tweet, { + user_id, + type: TweetType.TWEET, + content, + }); + + const saved_tweet = await query_runner.manager.save(Tweet, new_tweet); + + // Extract and insert hashtags into tweet_hashtags table + + await this.insertTweetHashtags(content, saved_tweet.tweet_id, user_id, query_runner); + + await query_runner.commitTransaction(); + + await this.es_index_tweet_service.queueIndexTweet({ + tweet_id: saved_tweet.tweet_id, + }); + + if (mentions.mentioned_user_ids.length > 0) { + await this.mentionNotification( + mentions.mentioned_user_ids, + user_id, + saved_tweet, + 'add' + ); + } + + return plainToInstance(TweetResponseDTO, saved_tweet, { + excludeExtraneousValues: true, + }); + } catch (error) { + console.error('Error in createFakeTrendTweet:', error); + if (query_runner.isTransactionActive) { + await query_runner.rollbackTransaction(); + } + throw error; + } finally { + await query_runner.release(); + } + } + async deleteTweetsByUserId(user_id: string): Promise { + const query_runner = this.data_source.createQueryRunner(); + await query_runner.connect(); + await query_runner.startTransaction(); + + try { + console.log(user_id); + const tweets = await this.tweet_repository.find({ + where: { user_id }, + select: ['tweet_id', 'user_id', 'type'], + }); + + if (tweets.length === 0) { + console.log(`No tweets found for user ${user_id}`); + return; + } + + for (const tweet of tweets) { + try { + // Queue repost and quote delete jobs, handle mentions + await this.queueRepostAndQuoteDeleteJobs( + tweet, + tweet.type, + user_id, + query_runner + ); + await query_runner.commitTransaction(); + + // Hard delete the tweet + await this.tweet_repository.delete({ tweet_id: tweet.tweet_id }); + + // Queue elasticsearch deletion + await this.es_delete_tweet_service.queueDeleteTweet({ + tweet_ids: [tweet.tweet_id], + }); + } catch (error) { + console.error(`Error deleting tweet ${tweet.tweet_id}:`, error); + // Continue deleting other tweets even if one fails + } + } + + console.log(`Successfully deleted ${tweets.length} tweets for user ${user_id}`); + } catch (error) { + if (query_runner.isTransactionActive) { + await query_runner.rollbackTransaction(); + } + console.error('Error deleting tweets by user:', error); + throw error; + } finally { + await query_runner.release(); + } + } + + async insertTweetHashtags( + content: string, + tweet_id: string, + user_id: string, + query_runner: QueryRunner + ): Promise { + if (content) { + const hashtags = extractHashtags(content) || []; + const unique_hashtags = [...new Set(hashtags)]; + // const normalized_hashtags = unique_hashtags.map((h) => h.toLowerCase()); + if (unique_hashtags.length > 0) { + await this.updateHashtags(unique_hashtags, user_id, query_runner, tweet_id); + } + } + } } diff --git a/src/tweets/utils/file-upload.config.ts b/src/tweets/utils/file-upload.config.ts index d34bac3f..46e39979 100644 --- a/src/tweets/utils/file-upload.config.ts +++ b/src/tweets/utils/file-upload.config.ts @@ -1,11 +1,12 @@ import { BadRequestException } from '@nestjs/common'; import { ERROR_MESSAGES } from '../../constants/swagger-messages'; +import { ALLOWED_IMAGE_MIME_TYPES } from 'src/constants/variables'; // Image configuration export const image_file_filter = (req: any, file: any, callback: any) => { - const allowed_mime_types = ['image/jpeg', 'image/png', 'image/gif', 'image/webp']; + // const allowed_mime_types = ['image/jpeg', 'image/png', 'image/gif', 'image/webp']; - if (!allowed_mime_types.includes(file.mimetype)) { + if (!ALLOWED_IMAGE_MIME_TYPES.includes(file.mimetype)) { return callback(new BadRequestException(ERROR_MESSAGES.INVALID_FILE_TYPE), false); } callback(null, true); diff --git a/src/user/dto/pagination-params.dto.ts b/src/user/dto/pagination-params.dto.ts index 938b969b..ff0abe81 100644 --- a/src/user/dto/pagination-params.dto.ts +++ b/src/user/dto/pagination-params.dto.ts @@ -8,7 +8,7 @@ export class PaginationParamsDto { example: 1, type: Number, }) - @Transform(({ value }) => (value ? parseInt(value, 10) : 0)) + @Transform(({ value }) => (value ? Number.parseInt(value, 10) : 0)) @IsInt() @Min(0) page_offset: number = 0; @@ -18,7 +18,7 @@ export class PaginationParamsDto { example: 10, type: Number, }) - @Transform(({ value }) => (value ? parseInt(value, 10) : 10)) + @Transform(({ value }) => (value ? Number.parseInt(value, 10) : 10)) @IsInt() @Min(1) @Max(100) diff --git a/src/user/dto/update_phone_number.dto.ts b/src/user/dto/update_phone_number.dto.ts index ef191d29..02dfb40b 100644 --- a/src/user/dto/update_phone_number.dto.ts +++ b/src/user/dto/update_phone_number.dto.ts @@ -1,6 +1,5 @@ import { ApiProperty } from '@nestjs/swagger'; import { IsOptional, IsPhoneNumber, IsString, MaxLength } from 'class-validator'; -import { STRING_MAX_LENGTH } from 'src/constants/variables'; export class UpdatePhoneNumberDto { @ApiProperty({ diff --git a/src/user/entities/index.ts b/src/user/entities/index.ts index ebb83f5f..c73227ff 100644 --- a/src/user/entities/index.ts +++ b/src/user/entities/index.ts @@ -2,3 +2,4 @@ export { User } from './user.entity'; export { UserFollows } from './user-follows.entity'; export { UserBlocks } from './user-blocks.entity'; export { UserMutes } from './user-mutes.entity'; +export { UserTimelineCursor } from './user-timeline-cursor.entity'; diff --git a/src/user/entities/user-blocks.entity.ts b/src/user/entities/user-blocks.entity.ts index 61d4180a..41d1352e 100644 --- a/src/user/entities/user-blocks.entity.ts +++ b/src/user/entities/user-blocks.entity.ts @@ -1,4 +1,4 @@ -import { Column, Entity, Index, JoinColumn, ManyToMany, PrimaryColumn } from 'typeorm'; +import { Column, Entity, Index, JoinColumn, ManyToMany, ManyToOne, PrimaryColumn } from 'typeorm'; import { User } from '../../user/entities/user.entity'; @Entity('user_blocks') @@ -13,11 +13,11 @@ export class UserBlocks { @Column({ type: 'timestamptz', default: () => 'CURRENT_TIMESTAMP' }) created_at: Date; - @ManyToMany(() => User, { onDelete: 'CASCADE' }) + @ManyToOne(() => User, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'blocker_id' }) blocker: User; - @ManyToMany(() => User, { onDelete: 'CASCADE' }) + @ManyToOne(() => User, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'blocked_id' }) blocked: User; diff --git a/src/user/entities/user-mutes.entity.ts b/src/user/entities/user-mutes.entity.ts index 0ffe0ae7..bbf0045c 100644 --- a/src/user/entities/user-mutes.entity.ts +++ b/src/user/entities/user-mutes.entity.ts @@ -1,4 +1,4 @@ -import { Column, Entity, JoinColumn, ManyToMany, PrimaryColumn } from 'typeorm'; +import { Column, Entity, JoinColumn, ManyToMany, ManyToOne, PrimaryColumn } from 'typeorm'; import { User } from '../../user/entities/user.entity'; @Entity('user_mutes') @@ -12,11 +12,11 @@ export class UserMutes { @Column({ type: 'timestamptz', default: () => 'CURRENT_TIMESTAMP' }) created_at: Date; - @ManyToMany(() => User, { onDelete: 'CASCADE' }) + @ManyToOne(() => User, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'muter_id' }) muter: User; - @ManyToMany(() => User, { onDelete: 'CASCADE' }) + @ManyToOne(() => User, { onDelete: 'CASCADE' }) @JoinColumn({ name: 'muted_id' }) muted: User; diff --git a/src/user/entities/user-timeline-cursor.entity.ts b/src/user/entities/user-timeline-cursor.entity.ts new file mode 100644 index 00000000..09eccc1d --- /dev/null +++ b/src/user/entities/user-timeline-cursor.entity.ts @@ -0,0 +1,19 @@ +import { Column, Entity, JoinColumn, ManyToOne, PrimaryColumn } from 'typeorm'; +import { User } from './user.entity'; + +@Entity('user_timeline_cursors') +export class UserTimelineCursor { + @PrimaryColumn({ type: 'uuid' }) + user_id: string; + + @Column({ type: 'uuid', nullable: true }) + last_fetched_tweet_id: string | null; + @Column({ type: 'integer', default: 0 }) + last_fetched_position: number; + @Column({ type: 'timestamptz', default: () => 'CURRENT_TIMESTAMP' }) + last_updated_at: Date; + + @ManyToOne(() => User, { onDelete: 'CASCADE' }) + @JoinColumn({ name: 'user_id' }) + user: User; +} diff --git a/src/user/entities/user.entity.ts b/src/user/entities/user.entity.ts index 21061192..4f993e2d 100644 --- a/src/user/entities/user.entity.ts +++ b/src/user/entities/user.entity.ts @@ -1,6 +1,5 @@ import { Exclude } from 'class-transformer'; import { Tweet } from '../../tweets/entities/tweet.entity'; -import { Hashtag } from '../../tweets/entities/hashtags.entity'; import { Column, DeleteDateColumn, @@ -88,12 +87,9 @@ export class User { @Column({ type: 'int', default: 0 }) following: number; - @Column({ name: 'fcm_token', type: 'varchar', unique: true, nullable: true }) + @Column({ name: 'fcm_token', type: 'varchar', unique: true, nullable: true, select: false }) fcm_token?: string | null; - @OneToMany(() => Hashtag, (hashtags) => hashtags.created_by, { onDelete: 'CASCADE' }) - hashtags: Hashtag[]; - @OneToMany(() => Tweet, (tweet) => tweet.user, {}) tweets: Tweet[]; diff --git a/src/user/user.controller.spec.ts b/src/user/user.controller.spec.ts index 7b5f9712..988304ef 100644 --- a/src/user/user.controller.spec.ts +++ b/src/user/user.controller.spec.ts @@ -13,6 +13,7 @@ import { ConflictException, ForbiddenException, NotFoundException, + UnauthorizedException, } from '@nestjs/common'; import { ERROR_MESSAGES } from 'src/constants/swagger-messages'; import { GetUsersByIdDto } from './dto/get-users-by-id.dto'; @@ -29,6 +30,7 @@ import { CursorPaginationDto } from './dto/cursor-pagination-params.dto'; import { TweetResponseDTO } from 'src/tweets/dto/tweet-response.dto'; import { TweetType } from 'src/shared/enums/tweet-types.enum'; import { UsernameRecommendationsResponseDto } from './dto/username-recommendations-response.dto'; +import { UserRelationsResponseDto } from './dto/user-relations-response.dto'; describe('UserController', () => { let controller: UserController; @@ -65,6 +67,7 @@ describe('UserController', () => { assignInterests: jest.fn(), changeLanguage: jest.fn(), getUsernameRecommendations: jest.fn(), + getUserRelationsCounts: jest.fn(), }; const module: TestingModule = await Test.createTestingModule({ @@ -679,6 +682,144 @@ describe('UserController', () => { }); }); + describe('getFollowings', () => { + it('should call user_service.getFollowing with the current user id, target user id and getFollowingDto without following filter', async () => { + const mock_response: UserListResponseDto = { + data: [ + { + user_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + name: 'Alyaa Ali', + username: 'Alyaali242', + bio: 'hi there!', + avatar_url: 'https://cdn.app.com/profiles/u877.jpg', + is_following: false, + is_follower: false, + is_muted: false, + is_blocked: true, + verified: false, + followers: 0, + following: 0, + }, + { + user_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + name: 'Amira Khalid', + username: 'amira2342', + bio: 'hi there!', + avatar_url: 'https://cdn.app.com/profiles/u877.jpg', + is_following: true, + is_follower: false, + is_muted: true, + is_blocked: true, + verified: false, + followers: 0, + following: 0, + }, + ], + pagination: { + next_cursor: '2025-10-31T12:00:00.000Z_550e8400-e29b-41d4-a716-446655440000', + has_more: false, + }, + }; + + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const target_user_id = 'b2d59899-f706-4c8f-97d7-ba2e9fc22d90'; + const query_dto: GetFollowersDto = { + cursor: '2025-10-31T12:00:00.000Z_550e8400-e29b-41d4-a716-446655440000', + limit: 20, + }; + + const get_followers_spy = jest + .spyOn(user_service, 'getFollowing') + .mockResolvedValueOnce(mock_response); + + const result = await controller.getFollowing( + current_user_id, + target_user_id, + query_dto + ); + + expect(get_followers_spy).toHaveBeenCalledWith( + current_user_id, + target_user_id, + query_dto + ); + expect(get_followers_spy).toHaveBeenCalledTimes(1); + expect(result).toEqual(mock_response); + }); + it('should call user_service.getFollowing with the current user id, target user id and getFollowingDto with following filter', async () => { + const mock_response: UserListResponseDto = { + data: [ + { + user_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + name: 'Alyaa Ali', + username: 'Alyaali242', + bio: 'hi there!', + avatar_url: 'https://cdn.app.com/profiles/u877.jpg', + is_following: false, + is_follower: false, + is_muted: false, + is_blocked: true, + verified: false, + followers: 0, + following: 0, + }, + ], + pagination: { + next_cursor: '2025-10-31T12:00:00.000Z_550e8400-e29b-41d4-a716-446655440000', + has_more: false, + }, + }; + + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const target_user_id = 'b2d59899-f706-4c8f-97d7-ba2e9fc22d90'; + const query_dto: GetFollowersDto = { + cursor: '2025-10-31T12:00:00.000Z_550e8400-e29b-41d4-a716-446655440000', + limit: 20, + }; + + const get_followers_spy = jest + .spyOn(user_service, 'getFollowing') + .mockResolvedValueOnce(mock_response); + + const result = await controller.getFollowing( + current_user_id, + target_user_id, + query_dto + ); + + expect(get_followers_spy).toHaveBeenCalledWith( + current_user_id, + target_user_id, + query_dto + ); + expect(get_followers_spy).toHaveBeenCalledTimes(1); + expect(result).toEqual(mock_response); + }); + + it('should throw if service throws user not found', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const target_user_id = 'b2d59899-f706-4c8f-97d7-ba2e9fc22d90'; + + const query_dto: GetFollowersDto = { + cursor: '2025-10-31T12:00:00.000Z_550e8400-e29b-41d4-a716-446655440000', + limit: 20, + }; + + const error = new NotFoundException(ERROR_MESSAGES.USER_NOT_FOUND); + + const get_followers = jest + .spyOn(user_service, 'getFollowing') + .mockRejectedValueOnce(error); + + await expect( + controller.getFollowing(current_user_id, target_user_id, query_dto) + ).rejects.toThrow(ERROR_MESSAGES.USER_NOT_FOUND); + + expect(get_followers).toHaveBeenCalledWith(current_user_id, target_user_id, query_dto); + expect(get_followers).toHaveBeenCalledTimes(1); + }); + }); + describe('getMutedList', () => { it('should call user_service.getMutedList with the current user id, target user id and queryDto', async () => { const mock_response: UserListResponseDto = { @@ -2134,4 +2275,118 @@ describe('UserController', () => { expect(get_username_recommendations_spy).toHaveBeenCalledTimes(1); }); }); + + describe('getRelationsCount', () => { + it('should return user relations count', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const mock_relations: UserRelationsResponseDto = { + blocked_count: 5, + muted_count: 10, + }; + + user_service.getUserRelationsCounts.mockResolvedValueOnce(mock_relations); + + const result = await controller.getRelationsCount(current_user_id); + + expect(result).toEqual(mock_relations); + expect(result.blocked_count).toBe(5); + expect(result.muted_count).toBe(10); + expect(user_service.getUserRelationsCounts).toHaveBeenCalledWith(current_user_id); + expect(user_service.getUserRelationsCounts).toHaveBeenCalledTimes(1); + }); + + it('should handle zero counts', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const mock_relations: UserRelationsResponseDto = { + blocked_count: 0, + muted_count: 0, + }; + + user_service.getUserRelationsCounts.mockResolvedValueOnce(mock_relations); + + const result = await controller.getRelationsCount(current_user_id); + + expect(result).toEqual(mock_relations); + expect(result.blocked_count).toBe(0); + expect(result.muted_count).toBe(0); + }); + + it('should throw error when service fails', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + user_service.getUserRelationsCounts.mockRejectedValueOnce(new Error('Database error')); + + await expect(controller.getRelationsCount(current_user_id)).rejects.toThrow( + 'Database error' + ); + expect(user_service.getUserRelationsCounts).toHaveBeenCalledWith(current_user_id); + }); + + it('should handle service returning null', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + user_service.getUserRelationsCounts.mockResolvedValueOnce(null as any); + + const result = await controller.getRelationsCount(current_user_id); + + expect(result).toBeNull(); + }); + + it('should handle large blocked and muted counts', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const mock_relations: UserRelationsResponseDto = { + blocked_count: 1000, + muted_count: 500, + }; + + user_service.getUserRelationsCounts.mockResolvedValueOnce(mock_relations); + + const result = await controller.getRelationsCount(current_user_id); + + expect(result.blocked_count).toBe(1000); + expect(result.muted_count).toBe(500); + }); + + it('should handle only blocked count with no muted', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const mock_relations: UserRelationsResponseDto = { + blocked_count: 25, + muted_count: 0, + }; + + user_service.getUserRelationsCounts.mockResolvedValueOnce(mock_relations); + + const result = await controller.getRelationsCount(current_user_id); + + expect(result.blocked_count).toBe(25); + expect(result.muted_count).toBe(0); + }); + + it('should handle only muted count with no blocked', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const mock_relations: UserRelationsResponseDto = { + blocked_count: 0, + muted_count: 15, + }; + + user_service.getUserRelationsCounts.mockResolvedValueOnce(mock_relations); + + const result = await controller.getRelationsCount(current_user_id); + + expect(result.blocked_count).toBe(0); + expect(result.muted_count).toBe(15); + }); + + it('should handle unauthorized error', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + user_service.getUserRelationsCounts.mockRejectedValueOnce( + new UnauthorizedException(ERROR_MESSAGES.INVALID_OR_EXPIRED_TOKEN) + ); + + await expect(controller.getRelationsCount(current_user_id)).rejects.toThrow( + UnauthorizedException + ); + }); + }); }); diff --git a/src/user/user.controller.ts b/src/user/user.controller.ts index 361b44e8..7d447cc6 100644 --- a/src/user/user.controller.ts +++ b/src/user/user.controller.ts @@ -9,19 +9,17 @@ import { Post, Query, Req, - Res, UploadedFile, UseGuards, UseInterceptors, } from '@nestjs/common'; -import type { Request, Response } from 'express'; +import type { Request } from 'express'; import { UserService } from './user.service'; import { ApiBearerAuth, ApiBody, ApiConsumes, ApiCreatedResponse, - ApiNoContentResponse, ApiOkResponse, ApiOperation, ApiTags, @@ -70,9 +68,7 @@ import { JwtAuthGuard } from 'src/auth/guards/jwt.guard'; import { GetUsersByIdDto } from './dto/get-users-by-id.dto'; import { GetUsersByUsernameDto } from './dto/get-users-by-username.dto'; import { GetFollowersDto } from './dto/get-followers.dto'; -import { PaginationParamsDto } from './dto/pagination-params.dto'; import { UpdateUserDto } from './dto/update-user.dto'; -import { UpdatePhoneNumberDto } from './dto/update_phone_number.dto'; import { GetUserId } from 'src/decorators/get-userId.decorator'; import { FileInterceptor } from '@nestjs/platform-express'; import { DeleteFileDto } from './dto/delete-file.dto'; diff --git a/src/user/user.module.ts b/src/user/user.module.ts index d2132414..dde0386f 100644 --- a/src/user/user.module.ts +++ b/src/user/user.module.ts @@ -15,6 +15,7 @@ import { UsernameService } from 'src/auth/username.service'; import { FollowJobService } from 'src/background-jobs/notifications/follow/follow.service'; import { BackgroundJobsModule } from 'src/background-jobs'; import { CommunicationModule } from 'src/communication/communication.module'; +import { TimelineModule } from 'src/timeline/timeline.module'; @Module({ imports: [ @@ -24,9 +25,10 @@ import { CommunicationModule } from 'src/communication/communication.module'; CategoryModule, TweetsModule, BackgroundJobsModule, + TimelineModule, ], controllers: [UserController], providers: [UserService, UserRepository, PaginationService, UsernameService, FollowJobService], - exports: [UserRepository], + exports: [UserRepository, UserService], }) export class UserModule {} diff --git a/src/user/user.repository.spec.ts b/src/user/user.repository.spec.ts index dba27718..679b92a4 100644 --- a/src/user/user.repository.spec.ts +++ b/src/user/user.repository.spec.ts @@ -1640,6 +1640,7 @@ describe('UserRepository', () => { 'user.id AS user_exists', 'user.avatar_url AS avatar_url', 'user.name AS name', + 'user.username AS username', ]); expect(mock_query_builder.addSelect).toHaveBeenCalledWith( expect.stringContaining('user_follows'), diff --git a/src/user/user.repository.ts b/src/user/user.repository.ts index c3d0dd87..e8bff3d4 100644 --- a/src/user/user.repository.ts +++ b/src/user/user.repository.ts @@ -24,7 +24,7 @@ export class UserRepository extends Repository { } async findByEmail(email: string): Promise { - return await this.findOne({ where: { email } }); + return await this.findOne({ where: { email: email } }); } async findByGithubId(github_id: string): Promise { @@ -519,6 +519,7 @@ export class UserRepository extends Repository { 'user.id AS user_exists', 'user.avatar_url AS avatar_url', 'user.name AS name', + 'user.username AS username', ]) .addSelect( `EXISTS( diff --git a/src/user/user.service.spec.ts b/src/user/user.service.spec.ts index b75af40a..0da6efbb 100644 --- a/src/user/user.service.spec.ts +++ b/src/user/user.service.spec.ts @@ -3,6 +3,9 @@ import { UserService } from './user.service'; import { UserRepository } from './user.repository'; import { UserProfileDto } from './dto/user-profile.dto'; import { DetailedUserProfileDto } from './dto/detailed-user-profile.dto'; +import { InitTimelineQueueJobService } from 'src/background-jobs/timeline/timeline.service'; +import { TimelineRedisService } from 'src/timeline/services/timeline-redis.service'; +import { TimelineCandidatesService } from 'src/timeline/services/timeline-candidates.service'; import { BadRequestException, ConflictException, @@ -25,7 +28,7 @@ import { User } from './entities'; import { UploadFileResponseDto } from './dto/upload-file-response.dto'; import { DeleteFileDto } from './dto/delete-file.dto'; import { AssignInterestsDto } from './dto/assign-interests.dto'; -import { DeleteResult, In, Repository } from 'typeorm'; +import { DeleteResult, EntityManager, In, Repository } from 'typeorm'; import { ChangeLanguageDto } from './dto/change-language.dto'; import { ChangeLanguageResponseDto } from './dto/change-language-response.dto'; import { UserListResponseDto } from './dto/user-list-response.dto'; @@ -39,6 +42,7 @@ import { FollowJobService } from 'src/background-jobs/notifications/follow/follo import { EsUpdateUserJobService } from 'src/background-jobs/elasticsearch/es-update-user.service'; import { EsDeleteUserJobService } from 'src/background-jobs/elasticsearch/es-delete-user.service'; import { EsFollowJobService } from 'src/background-jobs/elasticsearch/es-follow.service'; +import { RedisService } from 'src/redis/redis.service'; describe('UserService', () => { let service: UserService; @@ -48,10 +52,24 @@ describe('UserService', () => { let pagination_service: jest.Mocked; let azure_storage_service: jest.Mocked; let config_service: jest.Mocked; + let redis_service: jest.Mocked; let category_repository: jest.Mocked>; let follow_job_service: jest.Mocked; + let es_delete_user_job_service: jest.Mocked; + let es_update_user_job_service: jest.Mocked; + let manager: jest.Mocked; beforeEach(async () => { + const mock_manager = { + count: jest.fn(), + find: jest.fn(), + findOne: jest.fn(), + save: jest.fn(), + delete: jest.fn(), + query: jest.fn(), + transaction: jest.fn(), + }; + const mock_user_repository = { getFollowersList: jest.fn(), getFollowingList: jest.fn(), @@ -76,6 +94,7 @@ describe('UserService', () => { delete: jest.fn(), softDelete: jest.fn(), exists: jest.fn(), + manager: mock_manager, }; const mock_tweet_repository = { @@ -127,10 +146,29 @@ describe('UserService', () => { get: jest.fn(), }; + const mock_redis_service = { + smembers: jest.fn(), + del: jest.fn(), + set: jest.fn(), + }; + const mock_category_repository = { findBy: jest.fn(), }; + const mock_init_timeline_queue_job_service = { + addJob: jest.fn(), + }; + + const mock_timeline_redis_service = { + initializeQueue: jest.fn(), + getTweetIdsInQueue: jest.fn().mockResolvedValue(new Set()), + }; + + const mock_timeline_candidates_service = { + getCandidates: jest.fn().mockResolvedValue([]), + }; + const module: TestingModule = await Test.createTestingModule({ providers: [ UserService, @@ -138,6 +176,7 @@ describe('UserService', () => { { provide: TweetsRepository, useValue: mock_tweet_repository }, { provide: AzureStorageService, useValue: mock_azure_storage_service }, { provide: ConfigService, useValue: mock_config_service }, + { provide: RedisService, useValue: mock_redis_service }, { provide: getRepositoryToken(Category), useValue: mock_category_repository }, { provide: UsernameService, useValue: mock_username_service }, { provide: PaginationService, useValue: mock_pagination_service }, @@ -145,6 +184,12 @@ describe('UserService', () => { { provide: EsUpdateUserJobService, useValue: mock_es_update_user_job_service }, { provide: EsDeleteUserJobService, useValue: mock_es_delete_user_job_service }, { provide: EsFollowJobService, useValue: mock_es_follow_job_service }, + { + provide: InitTimelineQueueJobService, + useValue: mock_init_timeline_queue_job_service, + }, + { provide: TimelineRedisService, useValue: mock_timeline_redis_service }, + { provide: TimelineCandidatesService, useValue: mock_timeline_candidates_service }, ], }).compile(); @@ -153,10 +198,14 @@ describe('UserService', () => { tweets_repository = module.get(TweetsRepository); azure_storage_service = module.get(AzureStorageService); config_service = module.get(ConfigService); + redis_service = module.get(RedisService); category_repository = module.get(getRepositoryToken(Category)); username_service = module.get(UsernameService); pagination_service = module.get(PaginationService); follow_job_service = module.get(FollowJobService); + es_delete_user_job_service = module.get(EsDeleteUserJobService); + es_update_user_job_service = module.get(EsUpdateUserJobService); + manager = user_repository.manager as jest.Mocked; }); afterEach(() => jest.clearAllMocks()); @@ -636,6 +685,17 @@ describe('UserService', () => { expect(get_my_profile_spy).toHaveBeenCalledTimes(1); expect(result).toEqual(mock_response); }); + + it('should throw if target user not found', async () => { + const user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + const exists_spy = jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(null); + + await expect(service.getMe(user_id)).rejects.toThrow(ERROR_MESSAGES.USER_NOT_FOUND); + + expect(exists_spy).toHaveBeenCalledWith({ where: { id: user_id } }); + expect(exists_spy).toHaveBeenCalledTimes(1); + }); }); describe('getUserById', () => { @@ -1270,6 +1330,10 @@ describe('UserService', () => { const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; const target_user_id = 'b2d59899-f706-4c8f-97d7-ba2e9fc22d90'; + jest.spyOn(user_repository, 'findOne').mockResolvedValue({ + id: current_user_id, + } as any); + const validate_spy = jest .spyOn(user_repository, 'validateRelationshipRequest') .mockResolvedValueOnce({ @@ -1359,6 +1423,10 @@ describe('UserService', () => { const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; const target_user_id = 'b2d59899-f706-4c8f-97d7-ba2e9fc22d90'; + jest.spyOn(user_repository, 'findOne').mockResolvedValue({ + id: current_user_id, + } as any); + const validate_spy = jest .spyOn(user_repository, 'validateRelationshipRequest') .mockResolvedValueOnce({ @@ -1398,6 +1466,10 @@ describe('UserService', () => { const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; const target_user_id = 'b2d59899-f706-4c8f-97d7-ba2e9fc22d90'; + jest.spyOn(user_repository, 'findOne').mockResolvedValue({ + id: current_user_id, + } as any); + const validate_spy = jest .spyOn(user_repository, 'validateRelationshipRequest') .mockResolvedValueOnce({ @@ -1437,6 +1509,10 @@ describe('UserService', () => { const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; const target_user_id = 'b2d59899-f706-4c8f-97d7-ba2e9fc22d90'; + jest.spyOn(user_repository, 'findOne').mockResolvedValue({ + id: current_user_id, + } as any); + const validate_spy = jest .spyOn(user_repository, 'validateRelationshipRequest') .mockResolvedValueOnce({ @@ -1471,6 +1547,20 @@ describe('UserService', () => { expect(verify_permissions_spy).toHaveBeenCalledWith(current_user_id, target_user_id); expect(verify_permissions_spy).toHaveBeenCalledTimes(1); }); + + it('should throw if current user not found', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const target_user_id = '0b059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + const exists_spy = jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(null); + + await expect(service.followUser(current_user_id, target_user_id)).rejects.toThrow( + ERROR_MESSAGES.USER_NOT_FOUND + ); + + expect(exists_spy).toHaveBeenCalledWith({ where: { id: current_user_id } }); + expect(exists_spy).toHaveBeenCalledTimes(1); + }); }); describe('unfollowUser', () => { @@ -1863,252 +1953,1010 @@ describe('UserService', () => { }); describe('updateUser', () => { - it('should update user and return updated profile', async () => { - const user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const update_user_dto: UpdateUserDto = { - name: 'Updated Name', - bio: 'Updated bio', - avatar_url: 'https://cdn.app.com/profiles/updated.jpg', - }; - - const existing_user: User = { - id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - name: 'Alyaa Ali', - username: 'Alyaa242', - password: 'hashed-password', - email: 'example@gmail.com', - created_at: new Date('2025-10-21T09:26:17.432Z'), - updated_at: new Date('2025-10-21T09:26:17.432Z'), - deleted_at: null, - language: 'ar', - bio: 'Software developer and tech enthusiast.', - avatar_url: 'https://example.com/images/profile.jpg', - cover_url: 'https://example.com/images/cover.jpg', - birth_date: new Date('2003-05-14'), - country: null, - verified: false, - online: false, - followers: 10, - following: 15, - hashtags: [], - tweets: [], + const user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + it('should update user successfully', async () => { + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + bio: 'Old bio', + avatar_url: null, + cover_url: null, }; - const updated_user: User = { - id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + const update_user_dto: UpdateUserDto = { name: 'Updated Name', - username: 'Alyaa242', - password: 'hashed-password', - email: 'example@gmail.com', - created_at: new Date('2025-10-21T09:26:17.432Z'), - updated_at: new Date('2025-10-21T09:26:17.432Z'), - deleted_at: null, - language: 'ar', bio: 'Updated bio', - avatar_url: 'https://cdn.app.com/profiles/updated.jpg', - cover_url: 'https://example.com/images/cover.jpg', - birth_date: new Date('2003-05-14'), - country: null, - verified: false, - online: false, - followers: 10, - following: 15, - hashtags: [], - tweets: [], }; - const mock_response = { - user_id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', + const updated_user = { + ...existing_user, name: 'Updated Name', - username: 'Alyaa242', bio: 'Updated bio', - avatar_url: 'https://cdn.app.com/profiles/updated.jpg', - cover_url: 'https://example.com/images/cover.jpg', - country: null, - created_at: new Date('2025-10-21T09:26:17.432Z'), - birth_date: new Date('2003-05-14'), - followers_count: 10, - following_count: 15, - email: 'example@gmail.com', - num_likes: undefined, - num_media: undefined, - num_posts: undefined, - num_replies: undefined, }; - const find_one_spy = jest - .spyOn(user_repository, 'findOne') - .mockResolvedValueOnce(existing_user); - - const save_spy = jest - .spyOn(user_repository, 'save') - .mockResolvedValueOnce(updated_user); + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); const result = await service.updateUser(user_id, update_user_dto); - expect(find_one_spy).toHaveBeenCalledWith({ + expect(user_repository.findOne).toHaveBeenCalledWith({ where: { id: user_id }, }); - expect(find_one_spy).toHaveBeenCalledTimes(1); - expect(save_spy).toHaveBeenCalledWith(updated_user); - expect(save_spy).toHaveBeenCalledTimes(1); - expect(result).toEqual(mock_response); + expect(user_repository.save).toHaveBeenCalledWith( + expect.objectContaining({ + name: 'Updated Name', + bio: 'Updated bio', + }) + ); + expect(es_update_user_job_service.queueUpdateUser).toHaveBeenCalledWith({ + user_id, + }); + expect(result).toMatchObject({ + name: 'Updated Name', + bio: 'Updated bio', + }); }); it('should throw NotFoundException when user does not exist', async () => { - const user_id = 'nonexistent-user-id'; const update_user_dto: UpdateUserDto = { name: 'Updated Name', }; - const find_one_spy = jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(null); + user_repository.findOne.mockResolvedValueOnce(null); + await expect(service.updateUser(user_id, update_user_dto)).rejects.toThrow( + NotFoundException + ); await expect(service.updateUser(user_id, update_user_dto)).rejects.toThrow( ERROR_MESSAGES.USER_NOT_FOUND ); - - expect(find_one_spy).toHaveBeenCalledWith({ - where: { id: user_id }, - }); - expect(find_one_spy).toHaveBeenCalledTimes(1); + expect(user_repository.save).not.toHaveBeenCalled(); }); - }); - describe('deleteUser', () => { - it('should delete user successfully', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + it('should update only provided fields and skip undefined fields', async () => { + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + bio: 'Old bio', + location: 'Old location', + }; - const existing_user: User = { - id: '0c059899-f706-4c8f-97d7-ba2e9fc22d6d', - name: 'Alyaa Ali', - username: 'Alyaa242', - password: 'hashed-password', - email: 'example@gmail.com', - created_at: new Date('2025-10-21T09:26:17.432Z'), - updated_at: new Date('2025-10-21T09:26:17.432Z'), - deleted_at: null, - language: 'ar', - bio: 'Software developer and tech enthusiast.', - avatar_url: 'https://example.com/images/profile.jpg', - cover_url: 'https://example.com/images/cover.jpg', - birth_date: new Date('2003-05-14'), - country: null, - verified: false, - online: false, - followers: 10, - following: 15, - hashtags: [], - tweets: [], + const update_user_dto: UpdateUserDto = { + name: 'Updated Name', + bio: undefined, }; - const find_one_spy = jest - .spyOn(user_repository, 'findOne') - .mockResolvedValueOnce(existing_user); + const updated_user = { + ...existing_user, + name: 'Updated Name', + }; - const delete_spy = jest - .spyOn(user_repository, 'softDelete') - .mockResolvedValueOnce({ affected: 1, raw: {}, generatedMaps: [] }); + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); - await service.deleteUser(current_user_id); + await service.updateUser(user_id, update_user_dto); - expect(find_one_spy).toHaveBeenCalledWith({ - where: { id: current_user_id }, - }); - expect(find_one_spy).toHaveBeenCalledTimes(1); - expect(delete_spy).toHaveBeenCalledWith(current_user_id); - expect(delete_spy).toHaveBeenCalledTimes(1); + expect(user_repository.save).toHaveBeenCalledWith( + expect.objectContaining({ + name: 'Updated Name', + bio: 'Old bio', + }) + ); }); - it('should throw NotFoundException when user does not exist', async () => { - const current_user_id = 'nonexistent-user-id'; + it('should update avatar and delete old avatar file', async () => { + const old_avatar_url = 'https://storage.azure.com/profiles/old-avatar.jpg'; + const new_avatar_url = 'https://storage.azure.com/profiles/new-avatar.jpg'; - const find_one_spy = jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(null); + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + avatar_url: old_avatar_url, + cover_url: null, + }; - await expect(service.deleteUser(current_user_id)).rejects.toThrow( - ERROR_MESSAGES.USER_NOT_FOUND - ); + const update_user_dto: UpdateUserDto = { + avatar_url: new_avatar_url, + }; - expect(find_one_spy).toHaveBeenCalledWith({ - where: { id: current_user_id }, - }); - expect(find_one_spy).toHaveBeenCalledTimes(1); - }); - }); + const updated_user = { + ...existing_user, + avatar_url: new_avatar_url, + }; - describe('uploadAvatar', () => { - it('should upload avatar successfully and return file info', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const file = { - fieldname: 'file', - originalname: 'avatar.jpg', - encoding: '7bit', - mimetype: 'image/jpeg', - size: 1024, - buffer: Buffer.from('fake-image-data'), - } as Express.Multer.File; + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + azure_storage_service.extractFileName.mockReturnValueOnce('old-avatar.jpg'); + config_service.get.mockReturnValueOnce('profile-images-container'); - const generated_file_name = `${current_user_id}_avatar.jpg`; - const uploaded_image_url = 'https://cdn.app.com/profiles/avatar.jpg'; - const container_name = 'container-name'; + const result = await service.updateUser(user_id, update_user_dto); - const mock_response: UploadFileResponseDto = { - image_url: uploaded_image_url, - image_name: generated_file_name, + expect(azure_storage_service.extractFileName).toHaveBeenCalledWith(old_avatar_url); + expect(config_service.get).toHaveBeenCalledWith( + 'AZURE_STORAGE_PROFILE_IMAGE_CONTAINER' + ); + expect(azure_storage_service.deleteFile).toHaveBeenCalledWith( + 'old-avatar.jpg', + 'profile-images-container' + ); + expect(result.avatar_url).toBe(new_avatar_url); + }); + + it('should update cover and delete old cover file', async () => { + const old_cover_url = 'https://storage.azure.com/covers/old-cover.jpg'; + const new_cover_url = 'https://storage.azure.com/covers/new-cover.jpg'; + + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + avatar_url: null, + cover_url: old_cover_url, }; - const generate_file_name_spy = jest - .spyOn(azure_storage_service, 'generateFileName') - .mockReturnValue(generated_file_name); + const update_user_dto: UpdateUserDto = { + cover_url: new_cover_url, + }; - const config_get_spy = jest - .spyOn(config_service, 'get') - .mockReturnValue(container_name); + const updated_user = { + ...existing_user, + cover_url: new_cover_url, + }; - const upload_file_spy = jest - .spyOn(azure_storage_service, 'uploadFile') - .mockResolvedValueOnce(uploaded_image_url); + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + azure_storage_service.extractFileName.mockReturnValueOnce('old-cover.jpg'); + config_service.get.mockReturnValueOnce('cover-images-container'); - const result = await service.uploadAvatar(current_user_id, file); + const result = await service.updateUser(user_id, update_user_dto); - expect(generate_file_name_spy).toHaveBeenCalledWith(current_user_id, file.originalname); - expect(generate_file_name_spy).toHaveBeenCalledTimes(1); - expect(config_get_spy).toHaveBeenCalledWith('AZURE_STORAGE_PROFILE_IMAGE_CONTAINER'); - expect(upload_file_spy).toHaveBeenCalledWith( - file.buffer, - generated_file_name, - container_name + expect(azure_storage_service.extractFileName).toHaveBeenCalledWith(old_cover_url); + expect(config_service.get).toHaveBeenCalledWith('AZURE_STORAGE_COVER_IMAGE_CONTAINER'); + expect(azure_storage_service.deleteFile).toHaveBeenCalledWith( + 'old-cover.jpg', + 'cover-images-container' ); - expect(upload_file_spy).toHaveBeenCalledTimes(1); - expect(result).toEqual(mock_response); + expect(result.cover_url).toBe(new_cover_url); }); - it('should throw BadRequestException when file is not provided', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const file = null as any; + it('should not delete old avatar when user has no previous avatar', async () => { + const new_avatar_url = 'https://storage.azure.com/profiles/new-avatar.jpg'; - await expect(service.uploadAvatar(current_user_id, file)).rejects.toThrow( - ERROR_MESSAGES.FILE_NOT_FOUND - ); - }); + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + avatar_url: null, + cover_url: null, + }; - it('should throw BadRequestException when file buffer is missing', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; - const file = { - fieldname: 'file', - originalname: 'avatar.jpg', - encoding: '7bit', - mimetype: 'image/jpeg', - size: 1024, - buffer: null, - } as any; + const update_user_dto: UpdateUserDto = { + avatar_url: new_avatar_url, + }; - await expect(service.uploadAvatar(current_user_id, file)).rejects.toThrow( - ERROR_MESSAGES.FILE_NOT_FOUND - ); + const updated_user = { + ...existing_user, + avatar_url: new_avatar_url, + }; + + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + + await service.updateUser(user_id, update_user_dto); + + expect(azure_storage_service.extractFileName).not.toHaveBeenCalled(); + expect(azure_storage_service.deleteFile).not.toHaveBeenCalled(); }); - it('should throw InternalServerErrorException when upload fails', async () => { - const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + it('should not delete old cover when user has no previous cover', async () => { + const new_cover_url = 'https://storage.azure.com/covers/new-cover.jpg'; + + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + avatar_url: null, + cover_url: null, + }; + + const update_user_dto: UpdateUserDto = { + cover_url: new_cover_url, + }; + + const updated_user = { + ...existing_user, + cover_url: new_cover_url, + }; + + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + + await service.updateUser(user_id, update_user_dto); + + expect(azure_storage_service.extractFileName).not.toHaveBeenCalled(); + expect(azure_storage_service.deleteFile).not.toHaveBeenCalled(); + }); + + it('should continue when deleting old avatar fails', async () => { + const old_avatar_url = 'https://storage.azure.com/profiles/old-avatar.jpg'; + const new_avatar_url = 'https://storage.azure.com/profiles/new-avatar.jpg'; + + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + avatar_url: old_avatar_url, + cover_url: null, + }; + + const update_user_dto: UpdateUserDto = { + avatar_url: new_avatar_url, + }; + + const updated_user = { + ...existing_user, + avatar_url: new_avatar_url, + }; + + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + azure_storage_service.extractFileName.mockReturnValueOnce('old-avatar.jpg'); + config_service.get.mockReturnValueOnce('profile-images-container'); + azure_storage_service.deleteFile.mockRejectedValueOnce( + new Error('File not found in storage') + ); + + const console_warn_spy = jest.spyOn(console, 'warn').mockImplementation(); + + const result = await service.updateUser(user_id, update_user_dto); + + expect(result.avatar_url).toBe(new_avatar_url); + expect(console_warn_spy).toHaveBeenCalledWith( + 'Failed to delete old avatar file:', + 'File not found in storage' + ); + expect(es_update_user_job_service.queueUpdateUser).toHaveBeenCalled(); + + console_warn_spy.mockRestore(); + }); + + it('should continue when deleting old cover fails', async () => { + const old_cover_url = 'https://storage.azure.com/covers/old-cover.jpg'; + const new_cover_url = 'https://storage.azure.com/covers/new-cover.jpg'; + + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + avatar_url: null, + cover_url: old_cover_url, + }; + + const update_user_dto: UpdateUserDto = { + cover_url: new_cover_url, + }; + + const updated_user = { + ...existing_user, + cover_url: new_cover_url, + }; + + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + azure_storage_service.extractFileName.mockReturnValueOnce('old-cover.jpg'); + config_service.get.mockReturnValueOnce('cover-images-container'); + azure_storage_service.deleteFile.mockRejectedValueOnce( + new Error('Storage service unavailable') + ); + + const console_warn_spy = jest.spyOn(console, 'warn').mockImplementation(); + + const result = await service.updateUser(user_id, update_user_dto); + + expect(result.cover_url).toBe(new_cover_url); + expect(console_warn_spy).toHaveBeenCalledWith( + 'Failed to delete old cover file:', + 'Storage service unavailable' + ); + expect(es_update_user_job_service.queueUpdateUser).toHaveBeenCalled(); + + console_warn_spy.mockRestore(); + }); + + it('should update both avatar and cover simultaneously', async () => { + const old_avatar_url = 'https://storage.azure.com/profiles/old-avatar.jpg'; + const old_cover_url = 'https://storage.azure.com/covers/old-cover.jpg'; + const new_avatar_url = 'https://storage.azure.com/profiles/new-avatar.jpg'; + const new_cover_url = 'https://storage.azure.com/covers/new-cover.jpg'; + + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + avatar_url: old_avatar_url, + cover_url: old_cover_url, + }; + + const update_user_dto: UpdateUserDto = { + avatar_url: new_avatar_url, + cover_url: new_cover_url, + }; + + const updated_user = { + ...existing_user, + avatar_url: new_avatar_url, + cover_url: new_cover_url, + }; + + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + azure_storage_service.extractFileName + .mockReturnValueOnce('old-avatar.jpg') + .mockReturnValueOnce('old-cover.jpg'); + config_service.get + .mockReturnValueOnce('profile-images-container') + .mockReturnValueOnce('cover-images-container'); + + const result = await service.updateUser(user_id, update_user_dto); + + expect(azure_storage_service.deleteFile).toHaveBeenCalledTimes(2); + expect(azure_storage_service.deleteFile).toHaveBeenCalledWith( + 'old-avatar.jpg', + 'profile-images-container' + ); + expect(azure_storage_service.deleteFile).toHaveBeenCalledWith( + 'old-cover.jpg', + 'cover-images-container' + ); + expect(result.avatar_url).toBe(new_avatar_url); + expect(result.cover_url).toBe(new_cover_url); + }); + + it('should update multiple user fields at once', async () => { + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Old Name', + bio: 'Old bio', + }; + + const update_user_dto: UpdateUserDto = { + name: 'New Name', + bio: 'New bio', + }; + + const updated_user = { + ...existing_user, + ...update_user_dto, + }; + + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + + const result = await service.updateUser(user_id, update_user_dto); + + expect(result.name).toBe('New Name'); + expect(result.bio).toBe('New bio'); + }); + + it('should queue elasticsearch update after successful user update', async () => { + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + }; + + const update_user_dto: UpdateUserDto = { + name: 'Updated Name', + }; + + const updated_user = { + ...existing_user, + name: 'Updated Name', + }; + + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + + await service.updateUser(user_id, update_user_dto); + + expect(es_update_user_job_service.queueUpdateUser).toHaveBeenCalledWith({ + user_id, + }); + expect(es_update_user_job_service.queueUpdateUser).toHaveBeenCalledTimes(1); + }); + + it('should not call deleteFile when avatar_url is undefined in update', async () => { + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + avatar_url: 'https://storage.azure.com/profiles/avatar.jpg', + cover_url: null, + }; + + const update_user_dto: UpdateUserDto = { + name: 'Updated Name', + }; + + const updated_user = { + ...existing_user, + name: 'Updated Name', + }; + + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + + await service.updateUser(user_id, update_user_dto); + + expect(azure_storage_service.deleteFile).not.toHaveBeenCalled(); + }); + + it('should not call deleteFile when cover_url is undefined in update', async () => { + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + avatar_url: null, + cover_url: 'https://storage.azure.com/covers/cover.jpg', + }; + + const update_user_dto: UpdateUserDto = { + bio: 'Updated bio', + }; + + const updated_user = { + ...existing_user, + bio: 'Updated bio', + }; + + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(updated_user as any); + + await service.updateUser(user_id, update_user_dto); + + expect(azure_storage_service.deleteFile).not.toHaveBeenCalled(); + }); + + it('should handle empty update DTO', async () => { + const existing_user = { + id: user_id, + username: 'testuser', + name: 'Test User', + }; + + const update_user_dto: UpdateUserDto = {}; + + user_repository.findOne.mockResolvedValueOnce(existing_user as any); + user_repository.save.mockResolvedValueOnce(existing_user as any); + + const result = await service.updateUser(user_id, update_user_dto); + + expect(user_repository.save).toHaveBeenCalledWith(existing_user); + expect(result.name).toBe('Test User'); + }); + }); + + describe('deleteUser', () => { + beforeEach(() => { + process.env.JWT_TOKEN_EXPIRATION_TIME = '12h'; + }); + + it('should delete user successfully with all cleanup operations', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const mock_jtis = ['jti-1', 'jti-2', 'jti-3']; + + const existing_user: User = { + id: current_user_id, + name: 'Alyaa Ali', + username: 'Alyaa242', + password: 'hashed-password', + email: 'example@gmail.com', + created_at: new Date('2025-10-21T09:26:17.432Z'), + updated_at: new Date('2025-10-21T09:26:17.432Z'), + deleted_at: null, + language: 'ar', + bio: 'Software developer and tech enthusiast.', + avatar_url: 'https://example.com/images/profile.jpg', + cover_url: 'https://example.com/images/cover.jpg', + birth_date: new Date('2003-05-14'), + country: null, + verified: false, + online: false, + followers: 10, + following: 15, + // hashtags: [], + tweets: [], + }; + + const find_one_spy = jest + .spyOn(user_repository, 'findOne') + .mockResolvedValueOnce(existing_user); + + const soft_delete_spy = jest + .spyOn(user_repository, 'softDelete') + .mockResolvedValueOnce({ affected: 1, raw: {}, generatedMaps: [] }); + + const smembers_spy = jest + .spyOn(redis_service, 'smembers') + .mockResolvedValueOnce(mock_jtis); + + const redis_del_spy = jest.spyOn(redis_service, 'del').mockResolvedValue(1); + + const redis_set_spy = jest.spyOn(redis_service, 'set').mockResolvedValueOnce('OK'); + + const extract_avatar_spy = jest + .spyOn(azure_storage_service, 'extractFileName') + .mockReturnValueOnce('profile.jpg'); + + const extract_cover_spy = jest + .spyOn(azure_storage_service, 'extractFileName') + .mockReturnValueOnce('cover.jpg'); + + const delete_avatar_spy = jest + .spyOn(azure_storage_service, 'deleteFile') + .mockResolvedValueOnce(undefined); + + const delete_cover_spy = jest + .spyOn(azure_storage_service, 'deleteFile') + .mockResolvedValueOnce(undefined); + + const queue_delete_spy = jest + .spyOn(es_delete_user_job_service, 'queueDeleteUser') + .mockResolvedValueOnce({ success: true, job_id: 'job-123' }); + + const config_spy = jest + .spyOn(config_service, 'get') + .mockReturnValueOnce('profile-container') + .mockReturnValueOnce('cover-container'); + + await service.deleteUser(current_user_id); + + expect(find_one_spy).toHaveBeenCalledWith({ + where: { id: current_user_id }, + }); + expect(soft_delete_spy).toHaveBeenCalledWith(current_user_id); + + expect(smembers_spy).toHaveBeenCalledWith(`user:${current_user_id}:refreshTokens`); + expect(redis_del_spy).toHaveBeenCalledWith('refresh:jti-1'); + expect(redis_del_spy).toHaveBeenCalledWith('refresh:jti-2'); + expect(redis_del_spy).toHaveBeenCalledWith('refresh:jti-3'); + expect(redis_del_spy).toHaveBeenCalledWith(`user:${current_user_id}:refreshTokens`); + + expect(redis_set_spy).toHaveBeenCalledWith( + `deleted_user:${current_user_id}`, + current_user_id, + 43200 + ); + + expect(extract_avatar_spy).toHaveBeenCalledWith(existing_user.avatar_url); + expect(delete_avatar_spy).toHaveBeenCalledWith('profile.jpg', 'profile-container'); + + expect(extract_cover_spy).toHaveBeenCalledWith(existing_user.cover_url); + expect(delete_cover_spy).toHaveBeenCalledWith('cover.jpg', 'cover-container'); + + expect(queue_delete_spy).toHaveBeenCalledWith({ + user_id: current_user_id, + }); + }); + + it('should delete user without refresh tokens', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + const existing_user: User = { + id: current_user_id, + name: 'Alyaa Ali', + username: 'Alyaa242', + password: 'hashed-password', + email: 'example@gmail.com', + created_at: new Date('2025-10-21T09:26:17.432Z'), + updated_at: new Date('2025-10-21T09:26:17.432Z'), + deleted_at: null, + language: 'ar', + bio: 'blah', + avatar_url: null, + cover_url: null, + birth_date: new Date('2003-05-14'), + country: null, + verified: false, + online: false, + followers: 10, + following: 15, + tweets: [], + }; + + jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(existing_user); + jest.spyOn(user_repository, 'softDelete').mockResolvedValueOnce({ + affected: 1, + raw: {}, + generatedMaps: [], + }); + + const smembers_spy = jest.spyOn(redis_service, 'smembers').mockResolvedValueOnce([]); + + const redis_del_spy = jest.spyOn(redis_service, 'del').mockResolvedValue(1); + + const redis_set_spy = jest.spyOn(redis_service, 'set').mockResolvedValueOnce('OK'); + + const queue_delete_spy = jest + .spyOn(es_delete_user_job_service, 'queueDeleteUser') + .mockResolvedValueOnce({ success: true, job_id: 'job-123' }); + + await service.deleteUser(current_user_id); + + expect(smembers_spy).toHaveBeenCalledWith(`user:${current_user_id}:refreshTokens`); + expect(redis_del_spy).toHaveBeenCalledWith(`user:${current_user_id}:refreshTokens`); + expect(redis_set_spy).toHaveBeenCalled(); + expect(queue_delete_spy).toHaveBeenCalled(); + }); + + it('should delete user without avatar and cover images', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + const existing_user: User = { + id: current_user_id, + name: 'Alyaa Ali', + username: 'Alyaa242', + password: 'hashed-password', + email: 'example@gmail.com', + created_at: new Date('2025-10-21T09:26:17.432Z'), + updated_at: new Date('2025-10-21T09:26:17.432Z'), + deleted_at: null, + language: 'ar', + bio: 'blah', + avatar_url: null, + cover_url: null, + birth_date: new Date('2003-05-14'), + country: null, + verified: false, + online: false, + followers: 10, + following: 15, + tweets: [], + }; + + jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(existing_user); + jest.spyOn(user_repository, 'softDelete').mockResolvedValueOnce({ + affected: 1, + raw: {}, + generatedMaps: [], + }); + jest.spyOn(redis_service, 'smembers').mockResolvedValueOnce([]); + jest.spyOn(redis_service, 'del').mockResolvedValue(1); + jest.spyOn(redis_service, 'set').mockResolvedValueOnce('OK'); + jest.spyOn(es_delete_user_job_service, 'queueDeleteUser').mockResolvedValueOnce({ + success: true, + job_id: 'job-123', + }); + + const extract_file_spy = jest.spyOn(azure_storage_service, 'extractFileName'); + const delete_file_spy = jest.spyOn(azure_storage_service, 'deleteFile'); + + await service.deleteUser(current_user_id); + + expect(extract_file_spy).not.toHaveBeenCalled(); + expect(delete_file_spy).not.toHaveBeenCalled(); + }); + + it('should continue deletion even if Redis set operation fails', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + const existing_user: User = { + id: current_user_id, + name: 'Alyaa Ali', + username: 'Alyaa242', + password: 'hashed-password', + email: 'example@gmail.com', + created_at: new Date('2025-10-21T09:26:17.432Z'), + updated_at: new Date('2025-10-21T09:26:17.432Z'), + deleted_at: null, + language: 'ar', + bio: 'blah', + avatar_url: null, + cover_url: null, + birth_date: new Date('2003-05-14'), + country: null, + verified: false, + online: false, + followers: 10, + following: 15, + tweets: [], + }; + + jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(existing_user); + jest.spyOn(user_repository, 'softDelete').mockResolvedValueOnce({ + affected: 1, + raw: {}, + generatedMaps: [], + }); + jest.spyOn(redis_service, 'smembers').mockResolvedValueOnce([]); + jest.spyOn(redis_service, 'del').mockResolvedValue(1); + + const redis_set_spy = jest + .spyOn(redis_service, 'set') + .mockRejectedValueOnce(new Error('Redis connection failed')); + + const console_warn_spy = jest.spyOn(console, 'warn').mockImplementation(); + + const queue_delete_spy = jest + .spyOn(es_delete_user_job_service, 'queueDeleteUser') + .mockResolvedValueOnce({ success: true, job_id: 'job-123' }); + + await service.deleteUser(current_user_id); + + expect(redis_set_spy).toHaveBeenCalled(); + expect(console_warn_spy).toHaveBeenCalledWith( + 'Failed to store deleted user ID in Redis:', + 'Redis connection failed' + ); + expect(queue_delete_spy).toHaveBeenCalled(); + + console_warn_spy.mockRestore(); + }); + + it('should continue deletion even if avatar deletion fails', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + const existing_user: User = { + id: current_user_id, + name: 'Alyaa Ali', + username: 'Alyaa242', + password: 'hashed-password', + email: 'example@gmail.com', + created_at: new Date('2025-10-21T09:26:17.432Z'), + updated_at: new Date('2025-10-21T09:26:17.432Z'), + deleted_at: null, + language: 'ar', + bio: 'blah', + avatar_url: 'https://example.com/images/profile.jpg', + cover_url: null, + birth_date: new Date('2003-05-14'), + country: null, + verified: false, + online: false, + followers: 10, + following: 15, + tweets: [], + }; + + jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(existing_user); + jest.spyOn(user_repository, 'softDelete').mockResolvedValueOnce({ + affected: 1, + raw: {}, + generatedMaps: [], + }); + jest.spyOn(redis_service, 'smembers').mockResolvedValueOnce([]); + jest.spyOn(redis_service, 'del').mockResolvedValue(1); + jest.spyOn(redis_service, 'set').mockResolvedValueOnce('OK'); + jest.spyOn(azure_storage_service, 'extractFileName').mockReturnValueOnce('profile.jpg'); + jest.spyOn(config_service, 'get').mockReturnValueOnce('profile-container'); + + const delete_file_spy = jest + .spyOn(azure_storage_service, 'deleteFile') + .mockRejectedValueOnce(new Error('Azure storage error')); + + const console_warn_spy = jest.spyOn(console, 'warn').mockImplementation(); + + const queue_delete_spy = jest + .spyOn(es_delete_user_job_service, 'queueDeleteUser') + .mockResolvedValueOnce({ success: true, job_id: 'job-123' }); + + await service.deleteUser(current_user_id); + + expect(delete_file_spy).toHaveBeenCalled(); + expect(console_warn_spy).toHaveBeenCalledWith( + 'Failed to delete avatar file:', + 'Azure storage error' + ); + expect(queue_delete_spy).toHaveBeenCalled(); + + console_warn_spy.mockRestore(); + }); + + it('should continue deletion even if cover deletion fails', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + const existing_user: User = { + id: current_user_id, + name: 'Alyaa Ali', + username: 'Alyaa242', + password: 'hashed-password', + email: 'example@gmail.com', + created_at: new Date('2025-10-21T09:26:17.432Z'), + updated_at: new Date('2025-10-21T09:26:17.432Z'), + deleted_at: null, + language: 'ar', + bio: 'blah', + avatar_url: null, + cover_url: 'https://example.com/images/cover.jpg', + birth_date: new Date('2003-05-14'), + country: null, + verified: false, + online: false, + followers: 10, + following: 15, + tweets: [], + }; + + jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(existing_user); + jest.spyOn(user_repository, 'softDelete').mockResolvedValueOnce({ + affected: 1, + raw: {}, + generatedMaps: [], + }); + jest.spyOn(redis_service, 'smembers').mockResolvedValueOnce([]); + jest.spyOn(redis_service, 'del').mockResolvedValue(1); + jest.spyOn(redis_service, 'set').mockResolvedValueOnce('OK'); + jest.spyOn(azure_storage_service, 'extractFileName').mockReturnValueOnce('cover.jpg'); + jest.spyOn(config_service, 'get').mockReturnValueOnce('cover-container'); + + const delete_file_spy = jest + .spyOn(azure_storage_service, 'deleteFile') + .mockRejectedValueOnce(new Error('Azure storage error')); + + const console_warn_spy = jest.spyOn(console, 'warn').mockImplementation(); + + const queue_delete_spy = jest + .spyOn(es_delete_user_job_service, 'queueDeleteUser') + .mockResolvedValueOnce({ success: true, job_id: 'job-123' }); + + await service.deleteUser(current_user_id); + + expect(delete_file_spy).toHaveBeenCalled(); + expect(console_warn_spy).toHaveBeenCalledWith( + 'Failed to delete cover file:', + 'Azure storage error' + ); + expect(queue_delete_spy).toHaveBeenCalled(); + + console_warn_spy.mockRestore(); + }); + + it('should parse TTL correctly from environment variable', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + process.env.JWT_TOKEN_EXPIRATION_TIME = '24h'; + + const existing_user: User = { + id: current_user_id, + name: 'Alyaa Ali', + username: 'Alyaa242', + password: 'hashed-password', + email: 'example@gmail.com', + created_at: new Date('2025-10-21T09:26:17.432Z'), + updated_at: new Date('2025-10-21T09:26:17.432Z'), + deleted_at: null, + language: 'ar', + bio: 'blah', + avatar_url: null, + cover_url: null, + birth_date: new Date('2003-05-14'), + country: null, + verified: false, + online: false, + followers: 10, + following: 15, + tweets: [], + }; + + jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(existing_user); + jest.spyOn(user_repository, 'softDelete').mockResolvedValueOnce({ + affected: 1, + raw: {}, + generatedMaps: [], + }); + jest.spyOn(redis_service, 'smembers').mockResolvedValueOnce([]); + jest.spyOn(redis_service, 'del').mockResolvedValue(1); + + const redis_set_spy = jest.spyOn(redis_service, 'set').mockResolvedValueOnce('OK'); + + jest.spyOn(es_delete_user_job_service, 'queueDeleteUser').mockResolvedValueOnce({ + success: true, + job_id: 'job-123', + }); + + await service.deleteUser(current_user_id); + + expect(redis_set_spy).toHaveBeenCalledWith( + `deleted_user:${current_user_id}`, + current_user_id, + 86400 + ); + }); + + it('should throw NotFoundException when user does not exist', async () => { + const current_user_id = 'nonexistent-user-id'; + + const find_one_spy = jest.spyOn(user_repository, 'findOne').mockResolvedValueOnce(null); + + await expect(service.deleteUser(current_user_id)).rejects.toThrow( + ERROR_MESSAGES.USER_NOT_FOUND + ); + + expect(find_one_spy).toHaveBeenCalledWith({ + where: { id: current_user_id }, + }); + expect(find_one_spy).toHaveBeenCalledTimes(1); + }); + }); + + describe('uploadAvatar', () => { + it('should upload avatar successfully and return file info', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const file = { + fieldname: 'file', + originalname: 'avatar.jpg', + encoding: '7bit', + mimetype: 'image/jpeg', + size: 1024, + buffer: Buffer.from('fake-image-data'), + } as Express.Multer.File; + + const generated_file_name = `${current_user_id}_avatar.jpg`; + const uploaded_image_url = 'https://cdn.app.com/profiles/avatar.jpg'; + const container_name = 'container-name'; + + const mock_response: UploadFileResponseDto = { + image_url: uploaded_image_url, + image_name: generated_file_name, + }; + + const generate_file_name_spy = jest + .spyOn(azure_storage_service, 'generateFileName') + .mockReturnValue(generated_file_name); + + const config_get_spy = jest + .spyOn(config_service, 'get') + .mockReturnValue(container_name); + + const upload_file_spy = jest + .spyOn(azure_storage_service, 'uploadFile') + .mockResolvedValueOnce(uploaded_image_url); + + const result = await service.uploadAvatar(current_user_id, file); + + expect(generate_file_name_spy).toHaveBeenCalledWith(current_user_id, file.originalname); + expect(generate_file_name_spy).toHaveBeenCalledTimes(1); + expect(config_get_spy).toHaveBeenCalledWith('AZURE_STORAGE_PROFILE_IMAGE_CONTAINER'); + expect(upload_file_spy).toHaveBeenCalledWith( + file.buffer, + generated_file_name, + container_name + ); + expect(upload_file_spy).toHaveBeenCalledTimes(1); + expect(result).toEqual(mock_response); + }); + + it('should throw BadRequestException when file is not provided', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const file = null as any; + + await expect(service.uploadAvatar(current_user_id, file)).rejects.toThrow( + ERROR_MESSAGES.FILE_NOT_FOUND + ); + }); + + it('should throw BadRequestException when file buffer is missing', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + const file = { + fieldname: 'file', + originalname: 'avatar.jpg', + encoding: '7bit', + mimetype: 'image/jpeg', + size: 1024, + buffer: null, + } as any; + + await expect(service.uploadAvatar(current_user_id, file)).rejects.toThrow( + ERROR_MESSAGES.FILE_NOT_FOUND + ); + }); + + it('should throw InternalServerErrorException when upload fails', async () => { + const current_user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; const file = { fieldname: 'file', originalname: 'avatar.jpg', @@ -2500,7 +3348,6 @@ describe('UserService', () => { online: false, followers: 10, following: 15, - hashtags: [], tweets: [], }; @@ -3436,4 +4283,181 @@ describe('UserService', () => { expect(find_one_spy).toHaveBeenCalledTimes(1); }); }); + + describe('getUserRelationsCounts', () => { + const user_id = '0c059899-f706-4c8f-97d7-ba2e9fc22d6d'; + + it('should return blocked and muted counts', async () => { + manager.count.mockResolvedValueOnce(5).mockResolvedValueOnce(10); + + const result = await service.getUserRelationsCounts(user_id); + + expect(manager.count).toHaveBeenCalledTimes(2); + expect(manager.count).toHaveBeenNthCalledWith(1, 'user_blocks', { + where: { blocker_id: user_id }, + }); + expect(manager.count).toHaveBeenNthCalledWith(2, 'user_mutes', { + where: { muter_id: user_id }, + }); + expect(result).toEqual({ + blocked_count: 5, + muted_count: 10, + }); + }); + + it('should return zero counts when user has no blocks or mutes', async () => { + manager.count.mockResolvedValueOnce(0).mockResolvedValueOnce(0); + + const result = await service.getUserRelationsCounts(user_id); + + expect(result).toEqual({ + blocked_count: 0, + muted_count: 0, + }); + }); + + it('should return only blocked count when user has no mutes', async () => { + manager.count.mockResolvedValueOnce(25).mockResolvedValueOnce(0); + + const result = await service.getUserRelationsCounts(user_id); + + expect(result).toEqual({ + blocked_count: 25, + muted_count: 0, + }); + }); + + it('should return only muted count when user has no blocks', async () => { + manager.count.mockResolvedValueOnce(0).mockResolvedValueOnce(15); + + const result = await service.getUserRelationsCounts(user_id); + + expect(result).toEqual({ + blocked_count: 0, + muted_count: 15, + }); + }); + + it('should handle large counts', async () => { + manager.count.mockResolvedValueOnce(1000).mockResolvedValueOnce(500); + + const result = await service.getUserRelationsCounts(user_id); + + expect(result).toEqual({ + blocked_count: 1000, + muted_count: 500, + }); + }); + + it('should execute both queries in parallel', async () => { + const blocked_promise = Promise.resolve(5); + const muted_promise = Promise.resolve(10); + + manager.count + .mockReturnValueOnce(blocked_promise as any) + .mockReturnValueOnce(muted_promise as any); + + await service.getUserRelationsCounts(user_id); + + expect(manager.count).toHaveBeenCalledTimes(2); + + expect(manager.count).toHaveBeenNthCalledWith(1, 'user_blocks', { + where: { blocker_id: user_id }, + }); + expect(manager.count).toHaveBeenNthCalledWith(2, 'user_mutes', { + where: { muter_id: user_id }, + }); + }); + + it('should throw error when blocked count query fails', async () => { + manager.count.mockRejectedValueOnce(new Error('Database connection failed')); + + await expect(service.getUserRelationsCounts(user_id)).rejects.toThrow( + 'Database connection failed' + ); + }); + + it('should throw error when muted count query fails', async () => { + manager.count + .mockResolvedValueOnce(5) + .mockRejectedValueOnce(new Error('Query timeout')); + + await expect(service.getUserRelationsCounts(user_id)).rejects.toThrow('Query timeout'); + }); + + it('should throw error when both queries fail', async () => { + manager.count + .mockRejectedValueOnce(new Error('Database error')) + .mockRejectedValueOnce(new Error('Another error')); + + await expect(service.getUserRelationsCounts(user_id)).rejects.toThrow('Database error'); + }); + + it('should use correct entity manager', async () => { + manager.count.mockResolvedValueOnce(5).mockResolvedValueOnce(10); + + await service.getUserRelationsCounts(user_id); + + expect(user_repository.manager).toBe(manager); + expect(manager.count).toHaveBeenCalled(); + }); + + it('should return UserRelationsResponseDto structure', async () => { + manager.count.mockResolvedValueOnce(3).mockResolvedValueOnce(7); + + const result = await service.getUserRelationsCounts(user_id); + + expect(result).toHaveProperty('blocked_count'); + expect(result).toHaveProperty('muted_count'); + expect(typeof result.blocked_count).toBe('number'); + expect(typeof result.muted_count).toBe('number'); + }); + + it('should handle different user IDs correctly', async () => { + const another_user_id = '1a8e9906-65bb-4fa4-a614-ecc6a434ee94'; + + manager.count.mockResolvedValueOnce(2).mockResolvedValueOnce(3); + + await service.getUserRelationsCounts(another_user_id); + + expect(manager.count).toHaveBeenCalledWith('user_blocks', { + where: { blocker_id: another_user_id }, + }); + expect(manager.count).toHaveBeenCalledWith('user_mutes', { + where: { muter_id: another_user_id }, + }); + }); + + it('should query correct tables with correct where conditions', async () => { + manager.count.mockResolvedValueOnce(5).mockResolvedValueOnce(10); + + await service.getUserRelationsCounts(user_id); + + expect(manager.count).toHaveBeenNthCalledWith(1, 'user_blocks', { + where: { blocker_id: user_id }, + }); + + expect(manager.count).toHaveBeenNthCalledWith(2, 'user_mutes', { + where: { muter_id: user_id }, + }); + }); + + it('should handle very large count numbers', async () => { + manager.count.mockResolvedValueOnce(999999).mockResolvedValueOnce(888888); + + const result = await service.getUserRelationsCounts(user_id); + + expect(result.blocked_count).toBe(999999); + expect(result.muted_count).toBe(888888); + }); + + it('should return counts with correct types', async () => { + manager.count.mockResolvedValueOnce(5).mockResolvedValueOnce(10); + + const result = await service.getUserRelationsCounts(user_id); + + expect(Number.isInteger(result.blocked_count)).toBe(true); + expect(Number.isInteger(result.muted_count)).toBe(true); + }); + }); }); diff --git a/src/user/user.service.ts b/src/user/user.service.ts index 0a8c0a04..061e8102 100644 --- a/src/user/user.service.ts +++ b/src/user/user.service.ts @@ -6,21 +6,15 @@ import { InternalServerErrorException, NotFoundException, } from '@nestjs/common'; -import { CreateUserDto } from './dto/create-user.dto'; import { In, Repository } from 'typeorm'; -import { User } from './entities/user.entity'; import { InjectRepository } from '@nestjs/typeorm'; import { UserProfileDto } from './dto/user-profile.dto'; -import { instanceToInstance, plainToInstance } from 'class-transformer'; +import { plainToInstance } from 'class-transformer'; import { ERROR_MESSAGES } from 'src/constants/swagger-messages'; -import { SelectQueryBuilder } from 'typeorm/browser'; import { DetailedUserProfileDto } from './dto/detailed-user-profile.dto'; -import { MutualFollowerDto } from './dto/mutual-follower.dto'; import { GetFollowersDto } from './dto/get-followers.dto'; import { UserListItemDto } from './dto/user-list-item.dto'; -import { PaginationParamsDto } from './dto/pagination-params.dto'; import { UserRepository } from './user.repository'; -import { UserFollows } from './entities'; import { RelationshipType } from './enums/relationship-type.enum'; import { UpdateUserDto } from './dto/update-user.dto'; import { GetUsersByIdDto } from './dto/get-users-by-id.dto'; @@ -32,10 +26,7 @@ import { AssignInterestsDto } from './dto/assign-interests.dto'; import { Category } from 'src/category/entities'; import { ChangeLanguageDto } from './dto/change-language.dto'; import { DeleteFileDto } from './dto/delete-file.dto'; -import { delete_cover } from './user.swagger'; -import { promises } from 'dns'; import { UploadFileResponseDto } from './dto/upload-file-response.dto'; -import { TweetsService } from 'src/tweets/tweets.service'; import { ChangeLanguageResponseDto } from './dto/change-language-response.dto'; import { TweetsRepository } from 'src/tweets/tweets.repository'; import { CursorPaginationDto } from './dto/cursor-pagination-params.dto'; @@ -49,6 +40,12 @@ import { EsUpdateUserJobService } from 'src/background-jobs/elasticsearch/es-upd import { EsDeleteUserJobService } from 'src/background-jobs/elasticsearch/es-delete-user.service'; import { EsFollowJobService } from 'src/background-jobs/elasticsearch/es-follow.service'; import { UserRelationsResponseDto } from './dto/user-relations-response.dto'; +import { InitTimelineQueueJobService } from 'src/background-jobs/timeline/timeline.service'; +import { IInitTimelineQueueJobDTO } from 'src/background-jobs/timeline/timeline.dto'; +import { TimelineRedisService } from 'src/timeline/services/timeline-redis.service'; +import { TimelineCandidatesService } from 'src/timeline/services/timeline-candidates.service'; +import { RedisService } from 'src/redis/redis.service'; +import { REFRESH_TOKEN_KEY, USER_REFRESH_TOKENS_KEY } from 'src/constants/redis'; @Injectable() export class UserService { @@ -64,7 +61,11 @@ export class UserService { private readonly follow_job_service: FollowJobService, private readonly es_update_user_job_service: EsUpdateUserJobService, private readonly es_delete_user_job_service: EsDeleteUserJobService, - private readonly es_follow_job_service: EsFollowJobService + private readonly es_follow_job_service: EsFollowJobService, + private readonly init_timeline_queue_job_service: InitTimelineQueueJobService, + private readonly timeline_redis_service: TimelineRedisService, + private readonly timeline_candidates_service: TimelineCandidatesService, + private readonly redis_service: RedisService ) {} async getUsersByIds( @@ -322,13 +323,14 @@ export class UserService { if (current_user_id === target_user_id) { throw new BadRequestException(ERROR_MESSAGES.CANNOT_FOLLOW_YOURSELF); } - const [validation_result, follow_permissions] = await Promise.all([ + const [validation_result, follow_permissions, current_user] = await Promise.all([ this.user_repository.validateRelationshipRequest( current_user_id, target_user_id, RelationshipType.FOLLOW ), this.user_repository.verifyFollowPermissions(current_user_id, target_user_id), + this.user_repository.findOne({ where: { id: current_user_id } }), ]); console.log('validation_result: ', validation_result); @@ -337,6 +339,10 @@ export class UserService { throw new NotFoundException(ERROR_MESSAGES.USER_NOT_FOUND); } + if (!current_user) { + throw new NotFoundException(ERROR_MESSAGES.USER_NOT_FOUND); + } + if (validation_result.relationship_exists) { throw new ConflictException(ERROR_MESSAGES.ALREADY_FOLLOWING); } @@ -355,8 +361,9 @@ export class UserService { follower_id: current_user_id, followed_id: target_user_id, action: 'add', - follower_avatar_url: validation_result.avatar_url, - follower_name: validation_result.name, + follower_avatar_url: current_user.avatar_url || undefined, + follower_name: current_user.name, + follower_username: current_user.username, }); await this.es_follow_job_service.queueEsFollow({ @@ -639,6 +646,34 @@ export class UserService { await this.user_repository.softDelete(current_user_id); + const user_tokens_key = USER_REFRESH_TOKENS_KEY(current_user_id); + const refresh_token_jtis = await this.redis_service.smembers(user_tokens_key); + + if (refresh_token_jtis && refresh_token_jtis.length > 0) { + const delete_promises = refresh_token_jtis.map((jti) => { + const token_key = REFRESH_TOKEN_KEY(jti); + return this.redis_service.del(token_key); + }); + + await Promise.all(delete_promises); + console.log('deleted tokens successfully'); + } + + await this.redis_service.del(user_tokens_key); + + try { + const ttl_string = process.env.JWT_TOKEN_EXPIRATION_TIME || '12h'; + const ttl_seconds = this.parseDurationToSeconds(ttl_string); + + await this.redis_service.set( + `deleted_user:${current_user_id}`, + current_user_id, + ttl_seconds + ); + } catch (error) { + console.warn('Failed to store deleted user ID in Redis:', error.message); + } + if (user.avatar_url) { const file_name = this.azure_storage_service.extractFileName(user.avatar_url); @@ -790,6 +825,52 @@ export class UserService { })); await this.user_repository.insertUserInterests(user_interests); + + // Trigger background job to initialize timeline queue + // await this.init_timeline_queue_job_service.queueInitTimelineQueue({ + // user_id, + // }); + + await this.handleInitTimelineQueue({ user_id }); + } + + async handleInitTimelineQueue(job_data: IInitTimelineQueueJobDTO) { + const { user_id } = job_data; + + try { + console.log(`[Timeline] Initializing queue for user ${user_id}`); + + // Get existing tweet IDs in queue (should be empty for init, but check anyway) + const existing_tweet_ids = + await this.timeline_redis_service.getTweetIdsInQueue(user_id); + + // Get candidates + const candidates = await this.timeline_candidates_service.getCandidates( + user_id, + existing_tweet_ids, + 100 // Fetch up to 100 candidates for initialization + ); + + if (candidates.length === 0) { + console.log(`[Timeline] No candidates found for user ${user_id}`); + return; + } + + // Initialize queue with candidates + const tweets = candidates.map((c) => ({ + tweet_id: c.tweet_id, + created_at: c.created_at.toISOString(), + })); + + const queue_size = await this.timeline_redis_service.initializeQueue(user_id, tweets); + + console.log( + `[Timeline] Initialized queue for user ${user_id} with ${queue_size} tweets` + ); + } catch (error) { + console.error(`[Timeline] Error initializing queue for user ${user_id}:`, error); + throw error; + } } async changeLanguage( @@ -828,4 +909,27 @@ export class UserService { return { blocked_count, muted_count }; } + + private parseDurationToSeconds(duration: string): number { + const match = duration.match(/^(\d+)([smhd])$/); + if (!match) { + return 12 * 60 * 60; + } + + const value = parseInt(match[1]); + const unit = match[2]; + + switch (unit) { + case 's': + return value; + case 'm': + return value * 60; + case 'h': + return value * 60 * 60; + case 'd': + return value * 24 * 60 * 60; + default: + return 12 * 60 * 60; + } + } } diff --git a/src/validations/birth-date.ts b/src/validations/birth-date.ts index 72bbca3f..64460bfb 100644 --- a/src/validations/birth-date.ts +++ b/src/validations/birth-date.ts @@ -20,7 +20,7 @@ export class AgeRangeValidator implements ValidatorConstraintInterface { const today = new Date(); // Check if date is valid - if (isNaN(birth_date.getTime())) { + if (Number.isNaN(birth_date.getTime())) { this.age_calculation_result = { age: 0, is_valid: false, reason: 'invalid' }; return false; } @@ -59,7 +59,7 @@ export class AgeRangeValidator implements ValidatorConstraintInterface { return `User age must be between ${min_age} and ${max_age} years`; } - const { age, reason } = this.age_calculation_result; + const { reason } = this.age_calculation_result; switch (reason) { case 'too_young':