diff --git a/.env.local b/.env.local new file mode 100644 index 0000000000..ae19f6a799 --- /dev/null +++ b/.env.local @@ -0,0 +1,30 @@ +USER_SERVICE_NAME=user-express +USER_EXPRESS_PORT=9001 +USER_EXPRESS_DB_PORT=5431 +USER_PGDATA="/data/user-db" + +QUESTION_SERVICE_NAME=question-express +QUESTION_EXPRESS_PORT=9002 +QUESTION_EXPRESS_DB_PORT=5433 +QUESTION_PGDATA="/data/qn-db" + +COLLAB_SERVICE_NAME=collab-express +COLLAB_EXPRESS_PORT=9003 +COLLAB_EXPRESS_DB_PORT=5434 +COLLAB_PGDATA="/data/collab-db" +OPENAI_API_KEY="" + +MATCHING_SERVICE_NAME=match-express +MATCHING_EXPRESS_PORT=9004 +MATCHING_DB_USERNAME="peerprep-match-express" +MATCHING_DB_PASSWORD="password" +MATCHING_DB_HOST_PORT=6378 +MATCHING_DB_HOST_MGMT_PORT=3001 + +CHAT_SERVICE_NAME=chat-express +CHAT_EXPRESS_PORT=9005 +CHAT_EXPRESS_DB_PORT=5435 +CHAT_PGDATA="/data/chat-db" + +FRONTEND_SERVICE_NAME=frontend +FRONTEND_PORT=3000 \ No newline at end of file diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 0000000000..a3f9bfe965 --- /dev/null +++ b/.eslintignore @@ -0,0 +1,4 @@ +node_modules +**/node_modules + +**/dist diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 0000000000..845dc1c5e4 --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,113 @@ +{ + "extends": [ + "eslint:recommended", + "plugin:@typescript-eslint/recommended", + "prettier", + "plugin:tailwindcss/recommended", + "plugin:@tanstack/eslint-plugin-query/recommended" + // "plugin:@tanstack/eslint-plugin-query" + ], + "parserOptions": { + "ecmaVersion": "latest", + "sourceType": "module" + }, + "plugins": [ + "import", + "unused-imports", + "react", + "@typescript-eslint", + "tailwindcss", + "react-hooks", + "simple-import-sort" + ], + "rules": { + "@typescript-eslint/array-type": [ + "error", + { + "default": "generic", + "readonly": "generic" + } + ], + "@typescript-eslint/no-explicit-any": "warn", + "@typescript-eslint/explicit-module-boundary-types": "off", + 
"@typescript-eslint/explicit-function-return-type": 0, + "@typescript-eslint/naming-convention": [ + "error", + { + "selector": "interface", + "format": ["PascalCase"] + } + ], + "@typescript-eslint/explicit-member-accessibility": 0, + "@typescript-eslint/no-empty-function": 0, + "@typescript-eslint/no-empty-interface": "off", + "@typescript-eslint/no-unused-vars": 0, + "@typescript-eslint/no-use-before-define": 0, + + // Basic + "array-callback-return": "warn", + "no-console": "warn", + "no-multiple-empty-lines": ["error", { "max":1 }], + "no-prototype-builtins": 0, + // "no-expected-multiline": "warn", // can"t find rule definition + + "padding-line-between-statements": [ + "warn", + { "blankLine": "always", "prev": "*", "next": "block" }, + { "blankLine": "always", "prev": "block", "next": "*" }, + { "blankLine": "always", "prev": "*", "next": "block-like" }, + { "blankLine": "always", "prev": "block-like", "next": "*" } + ], + + // React + "react/display-name": 0, + "react/no-unescaped-entities": ["error", { "forbid": [">", "}", "\""] }], + "react/no-unknown-property": "off", + "react/self-closing-comp": [ + "error", + { + "component": true, + "html": true + } + ], + "react-hooks/rules-of-hooks": "off", + "simple-import-sort/imports": "error", + "simple-import-sort/exports": "error", + "tailwindcss/enforces-negative-arbitrary-values": "off", + "unused-imports/no-unused-imports": "error", + "unused-imports/no-unused-vars": [ + "warn", + { + "vars": "all", + "varsIgnorePattern": "^_", + "args": "after-used", + "argsIgnorePattern": "^_" + } + ] + }, + "ignorePatterns": ["**/*.js"], + "overrides": [ + { + "files": ["frontend/**/*.tsx"], + "rules": { + "react/prop-types": "off" + } + }, + { + "files": ["*.js", "*.ts", "*.tsx"], + "rules": { + "simple-import-sort/imports": [ + "error", + { + "groups": [ + ["^(?:os|path|http|fs|crypto|util|events|stream|url|zlib|querystring|tls|dgram|net|dns|child_process|cluster|readline|vm|assert|buffer|process|timers)(\/.*)?$"], + 
["^(?!(@\/|\\.\\.\/|\\.\/))"], + ["^@\/"], + ["^(?:\\.\/|\\.\\.\/|\\.)"] + ] + } + ] + } + } + ] +} diff --git a/.github/workflows/build-deploy-docker.yaml b/.github/workflows/build-deploy-docker.yaml new file mode 100644 index 0000000000..3f579c26a0 --- /dev/null +++ b/.github/workflows/build-deploy-docker.yaml @@ -0,0 +1,258 @@ +name: Build & publish PeerPrep images +on: + pull_request: + types: + - opened + - reopened + - synchronize + - ready_for_review + push: + branches: + - main + +env: + DOCKER_REGISTRY_USN: ay2425s1cs3219g16 + USER_EXPRESS_PORT: 9001 + QUESTION_EXPRESS_PORT: 9002 + COLLAB_EXPRESS_PORT: 9003 + MATCH_EXPRESS_PORT: 9004 + CHAT_EXPRESS_PORT: 9005 + FRONTEND_PORT: 3000 + +jobs: + changes: + if: ${{ !github.event.pull_request.draft && github.event.pull_request.title != 'Feedback' }} + runs-on: ubuntu-latest + # Required permissions + permissions: + pull-requests: read + # Set job outputs to values from filter step + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} # Output the matrix as a JSON string + steps: + - uses: actions/checkout@v4 + if: contains(github.ref, 'main') + # For pull requests it's not necessary to checkout the code + - uses: dorny/paths-filter@v3 + id: filter + with: + filters: | + user: + - 'backend/user/**' + question: + - 'backend/question/**' + collaboration: + - 'backend/collaboration/**' + matching: + - 'backend/matching/**' + chat: + - 'backend/chat/**' + frontend: + - 'frontend/**' + - name: output-job-matrix + id: set-matrix + run: | + is_main=${{ contains(github.ref, 'main') }} + matrix=() + if [[ "${{ steps.filter.outputs.user }}" == "true" || "$is_main" == "true" ]]; then + config=$(jq -n \ + --arg pkg "user" \ + --arg img "$DOCKER_REGISTRY_USN/user-express" \ + --arg ctx "./backend/user" \ + --arg dkr "./backend/user/express.Dockerfile" \ + --arg bag "port=$USER_EXPRESS_PORT" \ + '{package: $pkg, image: $img, context: $ctx, dockerfile: $dkr, "build-args": $bag}') + matrix+=("$config") + fi + if [[ "${{ 
steps.filter.outputs.question }}" == "true" || "$is_main" == "true" ]]; then + config=$(jq -n \ + --arg pkg "question" \ + --arg img "$DOCKER_REGISTRY_USN/question-express" \ + --arg ctx "./backend/question" \ + --arg dkr "./backend/question/express.Dockerfile" \ + --arg bag "port=$QUESTION_EXPRESS_PORT" \ + '{package: $pkg, image: $img, context: $ctx, dockerfile: $dkr, "build-args": $bag}') + matrix+=("$config") + fi + if [[ "${{ steps.filter.outputs.collaboration }}" == "true" || "$is_main" == "true" ]]; then + config=$(jq -n \ + --arg pkg "collaboration" \ + --arg img "$DOCKER_REGISTRY_USN/collab-express" \ + --arg ctx "./backend/collaboration" \ + --arg dkr "./backend/collaboration/express.Dockerfile" \ + --arg bag "port=$COLLAB_EXPRESS_PORT" \ + '{package: $pkg, image: $img, context: $ctx, dockerfile: $dkr, "build-args": $bag}') + matrix+=("$config") + fi + if [[ "${{ steps.filter.outputs.matching }}" == "true" || "$is_main" == "true" ]]; then + config=$(jq -n \ + --arg pkg "matching" \ + --arg img "$DOCKER_REGISTRY_USN/match-express" \ + --arg ctx "./backend/matching" \ + --arg dkr "./backend/matching/express.Dockerfile" \ + --arg bag "port=$MATCH_EXPRESS_PORT" \ + '{package: $pkg, image: $img, context: $ctx, dockerfile: $dkr, "build-args": $bag}') + matrix+=("$config") + fi + if [[ "${{ steps.filter.outputs.chat }}" == "true" || "$is_main" == "true" ]]; then + config=$(jq -n \ + --arg pkg "chat" \ + --arg img "$DOCKER_REGISTRY_USN/chat-express" \ + --arg ctx "./backend/chat" \ + --arg dkr "./backend/chat/express.Dockerfile" \ + --arg bag "port=$CHAT_EXPRESS_PORT" \ + '{package: $pkg, image: $img, context: $ctx, dockerfile: $dkr, "build-args": $bag}') + matrix+=("$config") + fi + if [[ "${{ steps.filter.outputs.frontend }}" == "true" || "$is_main" == "true" ]]; then + config=$(jq -n \ + --arg pkg "frontend" \ + --arg img "$DOCKER_REGISTRY_USN/frontend" \ + --arg ctx "./frontend" \ + --arg dkr "./frontend/frontend.Dockerfile" \ + --arg bag 
"port=$FRONTEND_PORT" \ + '{package: $pkg, image: $img, context: $ctx, dockerfile: $dkr, "build-args": $bag}') + matrix+=("$config") + fi + formatted_matrix=$(echo "${matrix[@]}" | jq -cs .) + echo "Outputs Generated: $formatted_matrix" + echo "matrix=$formatted_matrix" >> $GITHUB_OUTPUT + + build-push-deploy-image: + needs: changes + if: ${{ fromJson(needs.changes.outputs.matrix)[0] != null }} + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + include: ${{ fromJson(needs.changes.outputs.matrix) }} # Use the matrix from the first job + # - package: user + # image: ay2425s1cs3219g16/user-express + # context: ./backend/user + # dockerfile: ./backend/user/express.Dockerfile + # build-args: | + # port=9001 + # - package: question + # image: ay2425s1cs3219g16/question-express + # context: ./backend/question + # dockerfile: ./backend/question/express.Dockerfile + # build-args: | + # port=9002 + # - package: collaboration + # image: ay2425s1cs3219g16/collab-express + # context: ./backend/collaboration + # dockerfile: ./backend/collaboration/express.Dockerfile + # build-args: | + # port=9003 + # - package: matching + # image: ay2425s1cs3219g16/match-express + # context: ./backend/matching + # dockerfile: ./backend/matching/express.Dockerfile + # build-args: | + # port=9004 + # - package: frontend + # image: ay2425s1cs3219g16/frontend + # context: ./frontend + # dockerfile: ./frontend/frontend.Dockerfile + # build-args: | + # port=3000 + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + 
images: ${{ matrix.image }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=raw,value=latest,enable={{is_default_branch}} + + - name: Build and push Docker images for PeerPrep Services + uses: docker/build-push-action@v6 + with: + platforms: linux/amd64,linux/arm64 + context: ${{ matrix.context }} + file: ${{ matrix.dockerfile }} + build-args: ${{ matrix.build-args }} + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Auth GCloud + id: gcloud-auth + uses: google-github-actions/auth@v2 + if: ${{ contains(github.ref, 'main') && github.event.pull_request.title != 'Feedback' }} + with: + credentials_json: '${{ secrets.GKE_CJSON }}' + continue-on-error: true + + - name: Detect GKE Cluster + id: gcloud-detect-gke + if: ${{ contains(github.ref, 'main') && github.event.pull_request.title != 'Feedback' }} + run: |- + prev_step_success="${{ steps.gcloud-auth.outcome }}" + if [[ "$prev_step_success" != "success" ]]; then + echo "is_up=false" >> $GITHUB_OUTPUT + else + if [[ -z "$(gcloud container clusters list | grep ${{ secrets.GKE_CLUSTER }})" ]]; then + echo "is_up=false" >> $GITHUB_OUTPUT + else + echo "is_up=true" >> $GITHUB_OUTPUT + fi + fi + + - name: Get GKE creds + id: gcloud-get-gke-creds + uses: google-github-actions/get-gke-credentials@v2 + if: ${{ contains(github.ref, 'main') && github.event.pull_request.title != 'Feedback' && steps.gcloud-detect-gke.outputs.is_up == 'true' }} + with: + cluster_name: ${{ secrets.GKE_CLUSTER }} + location: ${{ secrets.GKE_ZONE }} + + - name: Deploy to GKE + id: gcloud-deploy-gke + if: ${{ contains(github.ref, 'main') && github.event.pull_request.title != 'Feedback' && steps.gcloud-detect-gke.outputs.is_up == 'true' }} + run: |- + service="${{ matrix.package }}" + deployment="frontend" + if [[ "$service" == "collaboration" ]]; then + deployment="collab-service" + elif [[ "$service" != "frontend" ]]; then + 
deployment="$service-service" + fi + kubectl -n peerprep rollout restart deployment "$deployment" + + results: + if: ${{ always() && !github.event.pull_request.draft }} + runs-on: ubuntu-latest + name: Final Results + needs: build-push-deploy-image + steps: + - run: | + result="${{ needs.build-push-deploy-image.result }}" + if [[ $result == "success" || $result == "skipped" ]]; then + exit 0 + else + exit 1 + fi diff --git a/.github/workflows/clear-cache.yaml b/.github/workflows/clear-cache.yaml new file mode 100644 index 0000000000..3b6834708f --- /dev/null +++ b/.github/workflows/clear-cache.yaml @@ -0,0 +1,40 @@ +name: Cleanup caches after merge +on: + pull_request: + types: + - closed + workflow_dispatch: + +jobs: + cleanup: + runs-on: ubuntu-latest + permissions: + # `actions:write` permission is required to delete caches + # See also: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#delete-a-github-actions-cache-for-a-repository-using-a-cache-id + actions: write + contents: read + + steps: + - name: Check out code + uses: actions/checkout@v4 + + - name: Cleanup cache + run: | + gh extension install actions/gh-actions-cache + + REPO=${{ github.repository }} + BRANCH=refs/pull/${{ github.event.pull_request.number }}/merge + + echo "Fetching list of cache key" + cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH --limit 100 --sort size | cut -f 1 ) + + ## Setting this to not fail the workflow while deleting cache keys. + set +e + echo "Deleting caches..." 
+ for cacheKey in $cacheKeysForPR + do + gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm + done + echo "Done" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000..e723580f96 --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +# Node +node_modules +**/node_modules + +# MacOS +.DS_Store + +# Build Artifacts +dist +**/dist + +# Secrets +.env +**/.env diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 0000000000..a466e22d9f --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,4 @@ + +npx lint-staged + +"$(pwd)/scripts/inject-openai-key.sh" diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 0000000000..01139d178e --- /dev/null +++ b/.prettierrc @@ -0,0 +1,9 @@ +{ + "useTabs": false, + "tabWidth": 2, + "singleQuote": true, + "jsxSingleQuote": true, + "quoteProps": "as-needed", + "trailingComma": "es5", + "printWidth": 100 +} diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000..edd7a3b4e1 --- /dev/null +++ b/Makefile @@ -0,0 +1,28 @@ +setup: + ./scripts/install-deps.sh + ./scripts/ensure-volume.sh + +migrate-seed: + ./scripts/migrate-seed-databases.sh + +db-up: + ./scripts/ensure-volume.sh + docker compose --env-file .env.local -f docker-compose.local.yaml up -d + +db-down: + docker compose --env-file .env.local -f docker-compose.local.yaml down + echo 'y' | docker volume prune + +up: + ./scripts/ensure-volume.sh + docker compose --env-file .env.local up -d + +down: + docker compose --env-file .env.local down + echo 'y' | docker volume prune + +k8s-up: + ./scripts/k8s-up.sh + +k8s-down: + ./scripts/k8s-down.sh diff --git a/README.md b/README.md index 259f7bba2e..9e6090b4ba 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,48 @@ +[![Review Assignment Due 
Date](https://classroom.github.com/assets/deadline-readme-button-22041afd0340ce965d47ae6ef1cefeee28c7c493a6346c4f15d667ab976d596c.svg)](https://classroom.github.com/a/bzPrOe11) + # CS3219 Project (PeerPrep) - AY2425S1 -## Group: Gxx -### Note: -- You can choose to develop individual microservices within separate folders within this repository **OR** use individual repositories (all public) for each microservice. -- In the latter scenario, you should enable sub-modules on this GitHub classroom repository to manage the development/deployment **AND** add your mentor to the individual repositories as a collaborator. -- The teaching team should be given access to the repositories as we may require viewing the history of the repository in case of any disputes or disagreements. +## Group: G16 + +### Note + +- You can choose to develop individual microservices within separate folders within this repository **OR** use individual repositories (all public) for each microservice. +- In the latter scenario, you should enable sub-modules on this GitHub classroom repository to manage the development/deployment **AND** add your mentor to the individual repositories as a collaborator. +- The teaching team should be given access to the repositories as we may require viewing the history of the repository in case of any disputes or disagreements. + +## Architecture + +architecture diagram + +We develop the following micro-services: + +- [`/backend/user`](./backend/user/README.md): Express service for user accounts, authentication. +- [`/backend/question`](./backend/question/README.md): Express service for question metadata, attempt history. +- [`/backend/matching`](./backend/matching/README.md): Express service for user game matching, via websockets and Redis Streams. +- [`/backend/collaboration`](./backend/collaboration/README.md): Express service for collaborative editing, via YJS and websockets. 
+- [`/backend/chat`](./backend/chat/README.md): Express service for peer-2-peer chatting. +- [`/frontend`](./frontend/README.md): A UI view layer for the application, built with React Router and Vite, and served with Nginx. + +### Hosting/Running + +We run this application using Kubernetes, and host this application on Google Kubernetes Service. The documentation can be found [here](./k8s/README.md). + +### Docker Compose + +Should you desire to rebuild the images, make use of the various commands in our [`Makefile`](./Makefile). + +Ideally, you should have Docker running, and you should run the commands in this sequence: + +1. `make setup` (Optional) + + - This sets up dependencies and creates the necessary persistent volumes. + +2. `make up` + + - In addition to the above, it also: + - Runs the migrations/seeding for each service's database within their own Docker container, once running. + - Builds and starts the services in order. diff --git a/backend/chat/.dockerignore b/backend/chat/.dockerignore new file mode 100644 index 0000000000..d26c7464b6 --- /dev/null +++ b/backend/chat/.dockerignore @@ -0,0 +1,2 @@ +node_modules +dist/ \ No newline at end of file diff --git a/backend/chat/.env.compose b/backend/chat/.env.compose new file mode 100644 index 0000000000..7dbe4154ad --- /dev/null +++ b/backend/chat/.env.compose @@ -0,0 +1,7 @@ +EXPRESS_PORT=9005 +EXPRESS_DB_HOST=chat-db +EXPRESS_DB_PORT=5432 +POSTGRES_DB=chat +POSTGRES_USER=peerprep-chat-express +POSTGRES_PASSWORD=Xk8qEcEI2sizjfEn/lF6mLqiyBECjIHY3q6sdXf9poQ= +PGDATA="/data/chat-db" diff --git a/backend/chat/.env.docker b/backend/chat/.env.docker new file mode 100644 index 0000000000..cc5972ea1b --- /dev/null +++ b/backend/chat/.env.docker @@ -0,0 +1,9 @@ +PEERPREP_UI_HOST=http://host.docker.internal:5173 + +EXPRESS_PORT=9005 +EXPRESS_DB_HOST=host.docker.internal +EXPRESS_DB_PORT=5435 +POSTGRES_DB=chat +POSTGRES_USER=peerprep-chat-express +POSTGRES_PASSWORD=Xk8qEcEI2sizjfEn/lF6mLqiyBECjIHY3q6sdXf9poQ= 
+PGDATA=/data/chat-db diff --git a/backend/chat/.env.local b/backend/chat/.env.local new file mode 100644 index 0000000000..da18c38a94 --- /dev/null +++ b/backend/chat/.env.local @@ -0,0 +1,9 @@ +PEERPREP_UI_HOST=http://localhost:5173 + +EXPRESS_PORT=9005 +EXPRESS_DB_HOST=localhost +EXPRESS_DB_PORT=5435 +POSTGRES_DB=chat +POSTGRES_USER=peerprep-chat-express +POSTGRES_PASSWORD=Xk8qEcEI2sizjfEn/lF6mLqiyBECjIHY3q6sdXf9poQ= +PGDATA=/data/chat-db diff --git a/backend/chat/README.md b/backend/chat/README.md new file mode 100644 index 0000000000..59a9a23552 --- /dev/null +++ b/backend/chat/README.md @@ -0,0 +1,30 @@ +# Matching Service + +## Running with Docker (Standalone) + +1. Run this command to build: + ```sh + docker build \ + -t chat-express-local \ + --build-arg port=9005 \ + -f express.Dockerfile . + ``` +2. Run this command, from the roxot folder: + + ```sh + make db-up + ``` + +3. Run the necessary migrate and seed commands, if you haven't yet. + +4. Run this command to expose the container: + ```sh + docker run -p 9005:9005 --env-file ./.env.docker chat-express-local + ``` +5. To stop the process, use the Docker UI or CLI with `docker rm -f ` (The child process loop has issues terminating) + +## Running with Docker-Compose (Main config) + +Edit the variables in the `.env.compose` file and run `make up` from the root folder. + +Any startup instructions will be run from `entrypoint.sh` instead. 
diff --git a/backend/chat/drizzle.config.ts b/backend/chat/drizzle.config.ts new file mode 100644 index 0000000000..b95650e9d9 --- /dev/null +++ b/backend/chat/drizzle.config.ts @@ -0,0 +1,16 @@ +import { defineConfig } from 'drizzle-kit'; + +const config = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB!, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; + +export default defineConfig({ + schema: './src/lib/db/schema.ts', + out: './drizzle', + dialect: 'postgresql', + dbCredentials: config, +}); diff --git a/backend/chat/drizzle/0000_initial_schema.sql b/backend/chat/drizzle/0000_initial_schema.sql new file mode 100644 index 0000000000..f4b1c69571 --- /dev/null +++ b/backend/chat/drizzle/0000_initial_schema.sql @@ -0,0 +1,20 @@ +DO $$ +BEGIN + CREATE TYPE "public"."action" AS ENUM('SEED'); +EXCEPTION + WHEN duplicate_object THEN null; +END $$; + +CREATE TABLE IF NOT EXISTS "admin" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "created_at" timestamp DEFAULT now(), + "action" "public"."action" NOT NULL +); + +CREATE TABLE IF NOT EXISTS "chat_messages" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "room_id" varchar(255) NOT NULL, + "sender_id" uuid NOT NULL, + "message" text NOT NULL, + "created_at" timestamp DEFAULT now() +); diff --git a/backend/chat/drizzle/meta/0000_snapshot.json b/backend/chat/drizzle/meta/0000_snapshot.json new file mode 100644 index 0000000000..b29190be4f --- /dev/null +++ b/backend/chat/drizzle/meta/0000_snapshot.json @@ -0,0 +1,97 @@ +{ + "id": "fb253102-46c6-477c-a0e6-5dad3ea879eb", + "prevId": "00000000-0000-0000-0000-000000000000", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.admin": { + "name": "admin", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + 
"created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "default": "now()" + }, + "action": { + "name": "action", + "type": "action", + "typeSchema": "public", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "public.chat_messages": { + "name": "chat_messages", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "room_id": { + "name": "room_id", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "sender_id": { + "name": "sender_id", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "message": { + "name": "message", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + } + }, + "enums": { + "public.action": { + "name": "action", + "schema": "public", + "values": [ + "SEED" + ] + } + }, + "schemas": {}, + "sequences": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/backend/chat/drizzle/meta/_journal.json b/backend/chat/drizzle/meta/_journal.json new file mode 100644 index 0000000000..ceb9be2953 --- /dev/null +++ b/backend/chat/drizzle/meta/_journal.json @@ -0,0 +1,13 @@ +{ + "version": "7", + "dialect": "postgresql", + "entries": [ + { + "idx": 0, + "version": "7", + "when": 1729871791234, + "tag": "0000_initial_schema", + "breakpoints": true + } + ] +} \ No newline at end of file diff --git a/backend/chat/entrypoint.sh b/backend/chat/entrypoint.sh new file mode 100644 index 0000000000..61c411f483 --- /dev/null +++ b/backend/chat/entrypoint.sh @@ -0,0 +1,13 @@ +#!/bin/sh 
+ +# Drizzle will handle its own logic to remove conflicts +npm run db:prod:migrate + +# Checks admin table and will not seed if data exists +npm run db:prod:seed + +rm -rf drizzle src tsconfig.json + +npm uninstall tsx drizzle-kit + +npm run start \ No newline at end of file diff --git a/backend/chat/express.Dockerfile b/backend/chat/express.Dockerfile new file mode 100644 index 0000000000..12b52d1cd6 --- /dev/null +++ b/backend/chat/express.Dockerfile @@ -0,0 +1,25 @@ +FROM node:lts-alpine AS build +WORKDIR /data/chat-express +COPY package*.json ./ +RUN npm install +COPY . . +RUN npm run build + +FROM node:lts-alpine AS production +WORKDIR /data/chat-express +COPY --from=build /data/chat-express/package*.json ./ +COPY --from=build --chown=node:node /data/chat-express/dist ./dist + +RUN npm ci --omit=dev + +# For migration +RUN npm install tsx drizzle-kit +COPY drizzle ./drizzle +COPY src/lib/db/ ./src/lib/db +COPY src/config.ts ./src +COPY tsconfig.json . +COPY entrypoint.sh . + +ARG port +EXPOSE ${port} +ENTRYPOINT [ "/bin/sh", "entrypoint.sh" ] \ No newline at end of file diff --git a/backend/chat/package.json b/backend/chat/package.json new file mode 100644 index 0000000000..ea14e9f0e0 --- /dev/null +++ b/backend/chat/package.json @@ -0,0 +1,47 @@ +{ + "name": "chat", + "version": "1.0.0", + "main": "dist/index.js", + "scripts": { + "dev": "env-cmd -f .env.local nodemon src/index.ts | pino-pretty", + "build": "tsc && tsc-alias", + "start": "node dist/index.js", + "build:local": "env-cmd -f .env.local tsc && tsc-alias", + "start:local": "env-cmd -f .env.local node dist/index.js", + "db:generate": "env-cmd -f .env.local drizzle-kit generate", + "db:migrate": "env-cmd -f .env.local tsx ./src/lib/db/migrate.ts", + "db:prod:migrate": "tsx ./src/lib/db/migrate.ts", + "db:prod:seed": "tsx ./src/lib/db/seed.ts", + "db:seed": "env-cmd -f .env.local tsx src/lib/db/seed.ts", + "db:seed:prod": "tsx src/lib/db/seed.ts", + "fmt": "prettier --config .prettierrc src --write", 
+ "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "description": "", + "dependencies": { + "dotenv": "^16.4.5", + "drizzle-orm": "^0.33.0", + "express": "^4.21.0", + "http": "^0.0.1-security", + "http-status-codes": "^2.3.0", + "pino": "^9.4.0", + "pino-http": "^10.3.0", + "postgres": "^3.4.4", + "socket.io": "^4.8.1", + "tsc-alias": "^1.8.10", + "tsx": "^4.19.1" + }, + "devDependencies": { + "@types/express": "^4.17.21", + "@types/node": "^22.5.5", + "drizzle-kit": "^0.24.2", + "nodemon": "^3.1.4", + "pino-pretty": "^11.2.2", + "ts-node": "^10.9.2", + "tsx": "^4.19.1", + "typescript": "^5.6.3" + } +} diff --git a/backend/chat/src/config.ts b/backend/chat/src/config.ts new file mode 100644 index 0000000000..02c44495bb --- /dev/null +++ b/backend/chat/src/config.ts @@ -0,0 +1,13 @@ +import 'dotenv/config'; + +export const UI_HOST = process.env.PEERPREP_UI_HOST!; + +export const EXPRESS_PORT = process.env.EXPRESS_PORT; + +export const dbConfig = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB!, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; diff --git a/backend/chat/src/controller/chat-controller.ts b/backend/chat/src/controller/chat-controller.ts new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/chat/src/index.ts b/backend/chat/src/index.ts new file mode 100644 index 0000000000..319b3c10dd --- /dev/null +++ b/backend/chat/src/index.ts @@ -0,0 +1,35 @@ +import { EXPRESS_PORT } from '@/config'; +import { logger } from '@/lib/utils'; +import server, { io } from '@/server'; +import { dbHealthCheck } from '@/server'; + +const port = Number.parseInt(EXPRESS_PORT || '8001'); + +const listenMessage = `App listening on port: ${port}`; +server.listen(port, () => { + void dbHealthCheck(); + logger.info(listenMessage); +}); + +const shutdown = () => { + logger.info('Shutting 
down gracefully...'); + + server.close((err) => { + if (err) { + logger.error('Error closing HTTP server', err); + process.exit(1); + } + + void io + .close(() => { + logger.info('WS Server shut down'); + }) + .then(() => { + logger.info('App shut down'); + process.exit(0); + }); + }); +}; + +process.on('SIGINT', shutdown); +process.on('SIGTERM', shutdown); diff --git a/backend/chat/src/lib/db/index.ts b/backend/chat/src/lib/db/index.ts new file mode 100644 index 0000000000..2fbbec3b0d --- /dev/null +++ b/backend/chat/src/lib/db/index.ts @@ -0,0 +1,16 @@ +import { drizzle } from 'drizzle-orm/postgres-js'; +import postgres from 'postgres'; + +export const config = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; + +const queryClient = postgres(config); + +export const db = drizzle(queryClient); + +export * from './schema'; diff --git a/backend/chat/src/lib/db/migrate.ts b/backend/chat/src/lib/db/migrate.ts new file mode 100644 index 0000000000..a012ab160a --- /dev/null +++ b/backend/chat/src/lib/db/migrate.ts @@ -0,0 +1,21 @@ +import { drizzle } from 'drizzle-orm/postgres-js'; +import { migrate } from 'drizzle-orm/postgres-js/migrator'; +import postgres from 'postgres'; + +const config = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; +const migrationConnection = postgres({ ...config, max: 1 }); + +const db = drizzle(migrationConnection); + +const main = async () => { + await migrate(db, { migrationsFolder: 'drizzle' }); + await migrationConnection.end(); +}; + +void main(); diff --git a/backend/chat/src/lib/db/schema.ts b/backend/chat/src/lib/db/schema.ts new file mode 100644 index 0000000000..dafd25a163 --- /dev/null +++ 
b/backend/chat/src/lib/db/schema.ts @@ -0,0 +1,17 @@ +import { pgEnum, pgTable, text, timestamp, uuid, varchar } from 'drizzle-orm/pg-core'; + +export const chatMessages = pgTable('chat_messages', { + id: uuid('id').primaryKey().notNull().defaultRandom(), // Unique message ID + roomId: varchar('room_id', { length: 255 }).notNull(), // Room ID to identify chat rooms + senderId: uuid('sender_id').notNull(), // ID of the user sending the message + message: text('message').notNull(), // The chat message content + createdAt: timestamp('created_at').defaultNow(), // Timestamp for when the message was created +}); + +export const actionEnum = pgEnum('action', ['SEED']); + +export const admin = pgTable('admin', { + id: uuid('id').primaryKey().notNull().defaultRandom(), + createdAt: timestamp('created_at').defaultNow(), + action: actionEnum('action').notNull(), +}); diff --git a/backend/chat/src/lib/utils/index.ts b/backend/chat/src/lib/utils/index.ts new file mode 100644 index 0000000000..1ff09efd40 --- /dev/null +++ b/backend/chat/src/lib/utils/index.ts @@ -0,0 +1 @@ +export * from './logger'; diff --git a/backend/chat/src/lib/utils/logger.ts b/backend/chat/src/lib/utils/logger.ts new file mode 100644 index 0000000000..e41655d003 --- /dev/null +++ b/backend/chat/src/lib/utils/logger.ts @@ -0,0 +1,3 @@ +import pinoLogger from 'pino'; + +export const logger = pinoLogger(); diff --git a/backend/chat/src/server.ts b/backend/chat/src/server.ts new file mode 100644 index 0000000000..2bcadf9544 --- /dev/null +++ b/backend/chat/src/server.ts @@ -0,0 +1,48 @@ +import http from 'http'; +import { config, exit } from 'process'; + +import { sql } from 'drizzle-orm'; +import express, { json } from 'express'; +import { StatusCodes } from 'http-status-codes'; +import pino from 'pino-http'; + +import { chatMessages, db } from './lib/db'; +import { logger } from './lib/utils/logger'; +import { createWs } from './ws'; + +const app = express(); +app.use(pino()); +app.use(json()); + 
+app.get('/', async (_req, res) => { + res.json({ + message: 'OK', + }); +}); + +// Health Check for Docker +app.get('/health', (_req, res) => res.status(StatusCodes.OK).send('OK')); + +// Ensure DB service is up before running. +app.get('/test-db', async (_req, res) => { + await db.select().from(chatMessages); + res.json({ message: 'OK' }); +}); + +export const dbHealthCheck = async () => { + try { + await db.execute(sql`SELECT 1`); + logger.info('Connected to DB'); + } catch (error) { + const { message } = error as Error; + logger.error('Cannot connect to DB: ' + message); + logger.error(`DB Config: ${JSON.stringify({ ...config, password: '' })}`); + exit(1); + } +}; + +const server = http.createServer(app); + +export const io = createWs(server); + +export default server; diff --git a/backend/chat/src/types/index.ts b/backend/chat/src/types/index.ts new file mode 100644 index 0000000000..6481206e6b --- /dev/null +++ b/backend/chat/src/types/index.ts @@ -0,0 +1,7 @@ +export interface IChatMessage { + id: string; + roomId: string; + senderId: string; + message: string; + createdAt: number; +} diff --git a/backend/chat/src/ws/events.ts b/backend/chat/src/ws/events.ts new file mode 100644 index 0000000000..1f6fcda0cd --- /dev/null +++ b/backend/chat/src/ws/events.ts @@ -0,0 +1,13 @@ +export const WS_CLIENT_EVENT = { + JOIN_ROOM: 'joinRoom', + LEAVE_ROOM: 'leaveRoom', + SEND_MESSAGE: 'sendMessage', + DISCONNECT: 'disconnect', +}; + +export const WS_SERVER_EVENT = { + JOINED_ROOM: 'joinedRoom', + LEFT_ROOM: 'leftRoom', + NEW_MESSAGE: 'newMessage', + MESSAGE_HISTORY: 'messageHistory', +}; diff --git a/backend/chat/src/ws/handlers.ts b/backend/chat/src/ws/handlers.ts new file mode 100644 index 0000000000..2a752ffcc5 --- /dev/null +++ b/backend/chat/src/ws/handlers.ts @@ -0,0 +1,88 @@ +import { eq } from 'drizzle-orm'; +import type { DefaultEventsMap, Server, Socket } from 'socket.io'; + +import { db } from '@/lib/db'; +import { chatMessages } from '@/lib/db/schema';
+import { logger } from '@/lib/utils'; +import type { IChatMessage } from '@/types'; + +import { WS_CLIENT_EVENT, WS_SERVER_EVENT } from './events'; + +type ISocketIOServer = Server<DefaultEventsMap, DefaultEventsMap>; +type ISocketIOSocket = Socket<DefaultEventsMap, DefaultEventsMap>; + +export const joinRoomHandler = + (socket: ISocketIOSocket) => + async (roomId?: string) => { + if (!roomId) { + logger.warn(`${WS_CLIENT_EVENT.JOIN_ROOM} event received without a roomId`); + return; + } + + socket.join(roomId); + logger.info(`Socket ${socket.id} joined room: ${roomId}`); + socket.emit(WS_SERVER_EVENT.JOINED_ROOM, roomId); + + try { + const messages = await db + .select() + .from(chatMessages) + .where(eq(chatMessages.roomId, roomId)) + .orderBy(chatMessages.createdAt) + .execute(); + + socket.emit(WS_SERVER_EVENT.MESSAGE_HISTORY, messages); + logger.info(`Sent message history to socket ${socket.id} for room ${roomId}`); + } catch (error) { + logger.error('Failed to fetch message history:', error); + socket.emit('error', 'Failed to load message history'); + } + }; + +export const leaveRoomHandler = + (socket: ISocketIOSocket) => + (roomId?: string) => { + if (roomId) { + socket.leave(roomId); + logger.info(`Socket ${socket.id} left room: ${roomId}`); + socket.emit(WS_SERVER_EVENT.LEFT_ROOM, roomId); + } else { + logger.warn(`${WS_CLIENT_EVENT.LEAVE_ROOM} event received without a roomId`); + } + }; + +export const sendMessageHandler = + (io: ISocketIOServer, socket: ISocketIOSocket) => + async (payload: Partial<IChatMessage>) => { + const { roomId, senderId, message } = payload; + + if (!roomId || !senderId || !message) { + const errorMessage = `${WS_CLIENT_EVENT.SEND_MESSAGE} event received with incomplete data`; + logger.warn(errorMessage); + socket.emit('error', errorMessage); + return; + } + + try { + const datetime = new Date(); + + await db.insert(chatMessages).values({ + roomId, + senderId, + message, + createdAt: datetime, + }); + + const messageData = { + roomId, + senderId, + message, + createdAt: datetime, + };
socket.broadcast.to(roomId).emit(WS_SERVER_EVENT.NEW_MESSAGE, messageData); + logger.info(`Message from ${senderId} in room ${roomId}: ${message}`); + } catch (error) { + logger.error('Failed to save message:', error); + socket.emit('error', 'Failed to send message'); + } + }; diff --git a/backend/chat/src/ws/index.ts b/backend/chat/src/ws/index.ts new file mode 100644 index 0000000000..aad1ca831e --- /dev/null +++ b/backend/chat/src/ws/index.ts @@ -0,0 +1 @@ +export * from './main'; diff --git a/backend/chat/src/ws/main.ts b/backend/chat/src/ws/main.ts new file mode 100644 index 0000000000..bcb5dda339 --- /dev/null +++ b/backend/chat/src/ws/main.ts @@ -0,0 +1,33 @@ +import { createServer } from 'http'; + +import { Server } from 'socket.io'; + +import { UI_HOST } from '@/config'; +import { logger } from '@/lib/utils'; + +import { WS_CLIENT_EVENT } from './events'; +import { joinRoomHandler, leaveRoomHandler, sendMessageHandler } from './handlers'; + +export const createWs = (server: ReturnType) => { + const io = new Server(server, { + cors: { + origin: [UI_HOST], + credentials: true, + }, + path: '/chat-socket', + }); + + io.on('connection', (socket) => { + logger.info(`Socket ${socket.id} connected`); + + socket.on(WS_CLIENT_EVENT.JOIN_ROOM, joinRoomHandler(socket)); + socket.on(WS_CLIENT_EVENT.LEAVE_ROOM, leaveRoomHandler(socket)); + socket.on(WS_CLIENT_EVENT.SEND_MESSAGE, sendMessageHandler(io, socket)); + socket.on(WS_CLIENT_EVENT.DISCONNECT, () => { + logger.info(`Client disconnected: ${socket.id}`); + socket.disconnect(); + }); + }); + + return io; +}; diff --git a/backend/chat/tsconfig.json b/backend/chat/tsconfig.json new file mode 100644 index 0000000000..f0af9cb65c --- /dev/null +++ b/backend/chat/tsconfig.json @@ -0,0 +1,109 @@ +{ + "compilerOptions": { + "baseUrl": ".", + /* Visit https://aka.ms/tsconfig to read more about this file */ + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. 
*/ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + /* Language and Environment */ + "target": "ES2022" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. 
*/ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + /* Modules */ + "module": "commonjs" /* Specify what module code is generated. */, + "rootDir": "./src" /* Specify the root folder within your source files. */, + // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + "paths": { + "@/*": ["./src/*"] + } /* Specify a set of entries that re-map imports to additional lookup locations. */, + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. 
*/ + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./dist" /* Specify an output folder for all emitted files. */, + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. 
*/ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */, + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, + /* Type Checking */ + "strict": true /* Enable all strict type-checking options. */, + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. 
*/ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ + }, + "exclude": ["drizzle.*.*ts"], + "ts-node": { + "swc": true, + "require": ["tsconfig-paths/register"] + } +} diff --git a/backend/collaboration/.dockerignore b/backend/collaboration/.dockerignore new file mode 100644 index 0000000000..d26c7464b6 --- /dev/null +++ b/backend/collaboration/.dockerignore @@ -0,0 +1,2 @@ +node_modules +dist/ \ No newline at end of file diff --git a/backend/collaboration/.env.compose b/backend/collaboration/.env.compose new file mode 100644 index 0000000000..9d12a39b6e --- /dev/null +++ b/backend/collaboration/.env.compose @@ -0,0 +1,12 @@ +# To be injected by Docker Compose +# PEERPREP_UI_HOST="http://frontend:3000" + +EXPRESS_PORT=9003 +EXPRESS_DB_HOST="collab-db" +EXPRESS_DB_PORT=5432 +POSTGRES_DB="collab" +POSTGRES_USER="peerprep-collab-express" +POSTGRES_PASSWORD="6rYE0nIzI2ThzDO" +PGDATA="/data/collab-db" +ENABLE_CODE_ASSISTANCE="true" +OPENAI_API_KEY="" diff --git a/backend/collaboration/.env.docker b/backend/collaboration/.env.docker new file mode 100644 index 0000000000..31a86cfeac --- /dev/null +++ b/backend/collaboration/.env.docker @@ -0,0 +1,11 @@ +PEERPREP_UI_HOST=http://host.docker.internal:5173 + +EXPRESS_PORT=9003 +EXPRESS_DB_HOST=host.docker.internal +EXPRESS_DB_PORT=5434 +POSTGRES_DB=collab +POSTGRES_USER=peerprep-collab-express +POSTGRES_PASSWORD=6rYE0nIzI2ThzDO +PGDATA=/data/collab-db +ENABLE_CODE_ASSISTANCE="true" +OPENAI_API_KEY="" diff --git a/backend/collaboration/.env.local b/backend/collaboration/.env.local new file mode 100644 index 0000000000..df132f265b --- /dev/null +++ b/backend/collaboration/.env.local @@ -0,0 +1,11 @@ +PEERPREP_UI_HOST="http://localhost:5173" + +EXPRESS_PORT=9003 +EXPRESS_DB_HOST="localhost" +EXPRESS_DB_PORT=5434 +POSTGRES_DB="collab" +POSTGRES_USER="peerprep-collab-express" +POSTGRES_PASSWORD="6rYE0nIzI2ThzDO" +PGDATA="/data/collab-db" +ENABLE_CODE_ASSISTANCE="true" +OPENAI_API_KEY="" diff --git a/backend/collaboration/README.md b/backend/collaboration/README.md new file mode 
100644 index 0000000000..8ccd51c009 --- /dev/null +++ b/backend/collaboration/README.md @@ -0,0 +1 @@ +# Collaboration Service diff --git a/backend/collaboration/drizzle.config.ts b/backend/collaboration/drizzle.config.ts new file mode 100644 index 0000000000..b95650e9d9 --- /dev/null +++ b/backend/collaboration/drizzle.config.ts @@ -0,0 +1,16 @@ +import { defineConfig } from 'drizzle-kit'; + +const config = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB!, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; + +export default defineConfig({ + schema: './src/lib/db/schema.ts', + out: './drizzle', + dialect: 'postgresql', + dbCredentials: config, +}); diff --git a/backend/collaboration/drizzle/0000_initial_schema.sql b/backend/collaboration/drizzle/0000_initial_schema.sql new file mode 100644 index 0000000000..b9726967b1 --- /dev/null +++ b/backend/collaboration/drizzle/0000_initial_schema.sql @@ -0,0 +1,14 @@ +CREATE TYPE "public"."action" AS ENUM('SEED');--> statement-breakpoint +CREATE TABLE IF NOT EXISTS "admin" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "created_at" timestamp DEFAULT now(), + "action" "action" NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS "rooms" ( + "room_id" varchar(255) PRIMARY KEY NOT NULL, + "user_id_1" uuid NOT NULL, + "user_id_2" uuid NOT NULL, + "question_id" serial NOT NULL, + "created_at" timestamp DEFAULT now() +); diff --git a/backend/collaboration/drizzle/meta/0000_snapshot.json b/backend/collaboration/drizzle/meta/0000_snapshot.json new file mode 100644 index 0000000000..73323e3074 --- /dev/null +++ b/backend/collaboration/drizzle/meta/0000_snapshot.json @@ -0,0 +1,105 @@ +{ + "id": "0fa8a8f0-f2e1-432e-890f-4a1da22f1a18", + "prevId": "00000000-0000-0000-0000-000000000000", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.admin": { + "name": "admin", + 
"schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "default": "now()" + }, + "action": { + "name": "action", + "type": "action", + "typeSchema": "public", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.rooms": { + "name": "rooms", + "schema": "", + "columns": { + "room_id": { + "name": "room_id", + "type": "varchar(255)", + "primaryKey": true, + "notNull": true + }, + "user_id_1": { + "name": "user_id_1", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "user_id_2": { + "name": "user_id_2", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "question_id": { + "name": "question_id", + "type": "serial", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": { + "public.action": { + "name": "action", + "schema": "public", + "values": [ + "SEED" + ] + } + }, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/backend/collaboration/drizzle/meta/_journal.json b/backend/collaboration/drizzle/meta/_journal.json new file mode 100644 index 0000000000..17254cc462 --- /dev/null +++ b/backend/collaboration/drizzle/meta/_journal.json @@ -0,0 +1,13 @@ +{ + "version": "7", + "dialect": "postgresql", + "entries": [ + { + 
"idx": 0, + "version": "7", + "when": 1731050544004, + "tag": "0000_initial_schema", + "breakpoints": true + } + ] +} diff --git a/backend/collaboration/entrypoint.sh b/backend/collaboration/entrypoint.sh new file mode 100755 index 0000000000..61c411f483 --- /dev/null +++ b/backend/collaboration/entrypoint.sh @@ -0,0 +1,13 @@ +#!/bin/sh + +# Drizzle will handle its own logic to remove conflicts +npm run db:prod:migrate + +# Checks admin table and will not seed if data exists +npm run db:prod:seed + +rm -rf drizzle src tsconfig.json + +npm uninstall tsx drizzle-kit + +npm run start \ No newline at end of file diff --git a/backend/collaboration/express.Dockerfile b/backend/collaboration/express.Dockerfile new file mode 100644 index 0000000000..69c6c147ab --- /dev/null +++ b/backend/collaboration/express.Dockerfile @@ -0,0 +1,28 @@ +FROM node:lts-alpine AS build +WORKDIR /data/collab-express +COPY package*.json ./ +RUN npm install +ARG env +COPY . . +RUN npm run build + +FROM node:lts-alpine AS production +WORKDIR /data/collab-express +COPY --from=build /data/collab-express/package*.json ./ +COPY --from=build --chown=node:node /data/collab-express/dist ./dist + +RUN npm ci --omit=dev + +RUN sed -i 's|./ws|ws|g' ./dist/ws.js + +# For migration +RUN npm install tsx drizzle-kit +COPY drizzle ./drizzle +COPY src/lib/db/ ./src/lib/db +COPY src/config.ts ./src +COPY tsconfig.json . +COPY entrypoint.sh . 
+ +ARG port +EXPOSE ${port} +ENTRYPOINT [ "/bin/sh", "entrypoint.sh" ] \ No newline at end of file diff --git a/backend/collaboration/package.json b/backend/collaboration/package.json new file mode 100644 index 0000000000..61b22f9b70 --- /dev/null +++ b/backend/collaboration/package.json @@ -0,0 +1,55 @@ +{ + "name": "collaboration", + "version": "1.0.0", + "main": "dist/index.js", + "scripts": { + "dev": "env-cmd -f .env.local nodemon src/index.ts | pino-pretty", + "build": "tsc && tsc-alias", + "start": "node dist/index.js", + "build:local": "env-cmd -f .env.local tsc && tsc-alias", + "start:local": "env-cmd -f .env.local node dist/index.js", + "db:generate": "env-cmd -f .env.local drizzle-kit generate", + "db:migrate": "env-cmd -f .env.local tsx ./src/lib/db/migrate.ts", + "db:prod:migrate": "tsx ./src/lib/db/migrate.ts", + "db:prod:seed": "tsx ./src/lib/db/seed.ts", + "db:seed": "env-cmd -f .env.local tsx src/lib/db/seed.ts", + "db:seed:prod": "tsx src/lib/db/seed.ts", + "fmt": "prettier --config .prettierrc src --write", + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "description": "", + "dependencies": { + "cors": "^2.8.5", + "dotenv": "^16.4.5", + "drizzle-orm": "^0.36.1", + "env-cmd": "^10.1.0", + "express": "^4.21.1", + "http-status-codes": "^2.3.0", + "openai": "^4.70.2", + "pg": "^8.13.0", + "pino": "^9.4.0", + "pino-http": "^10.3.0", + "postgres": "^3.4.4", + "redis": "^4.7.0", + "ws": "^8.18.0", + "y-postgresql": "^1.0.0", + "y-websocket": "^2.0.4", + "yjs": "^13.6.19" + }, + "devDependencies": { + "drizzle-kit": "^0.28.0", + "@types/cors": "^2.8.17", + "@types/express": "^4.17.21", + "@types/node": "^22.5.5", + "@types/pg": "^8.11.10", + "@types/ws": "^8.5.12", + "nodemon": "^3.1.4", + "pino-pretty": "^11.2.2", + "ts-node": "^10.9.2", + "tsc-alias": "^1.8.10", + "tsx": "^4.19.1" + } +} diff --git a/backend/collaboration/src/config.ts b/backend/collaboration/src/config.ts new file 
mode 100644 index 0000000000..b6e170923f --- /dev/null +++ b/backend/collaboration/src/config.ts @@ -0,0 +1,18 @@ +import 'dotenv/config'; + +export const UI_HOST = process.env.PEERPREP_UI_HOST!; + +export const EXPRESS_PORT = process.env.EXPRESS_PORT; + +export const dbConfig = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB!, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; + +// disable gc when using snapshots! +export const GC_ENABLED = process.env.GC !== 'false' && process.env.GC !== '0'; + +export const ENABLE_CODE_ASSISTANCE = process.env.ENABLE_CODE_ASSISTANCE === 'true'; diff --git a/backend/collaboration/src/controller/collab-controller.ts b/backend/collaboration/src/controller/collab-controller.ts new file mode 100644 index 0000000000..b7a2600b88 --- /dev/null +++ b/backend/collaboration/src/controller/collab-controller.ts @@ -0,0 +1,30 @@ +import type { Request, Response } from 'express'; +import { StatusCodes } from 'http-status-codes'; + +import { getCollabRoomService } from '@/service/get/collab-get-service'; +import type { IGetCollabRoomPayload } from '@/service/get/types'; + +export async function getCollabRoom(req: Request, res: Response) { + const { userid1, userid2, questionid } = req.query; + const payload: IGetCollabRoomPayload = { + userid1: userid1 as string, + userid2: userid2 as string, + questionid: questionid as string, + }; + + try { + const result = await getCollabRoomService(payload); + + if (result.error) { + return res.status(result.code).json({ + error: result.error.message ?? 
'An error occurred', + }); + } + + return res.status(result.code).json(result.data); + } catch (err) { + return res + .status(StatusCodes.INTERNAL_SERVER_ERROR) + .json({ success: false, message: 'An error occurred', err }); + } +} diff --git a/backend/collaboration/src/controller/get-rooms-controller.ts b/backend/collaboration/src/controller/get-rooms-controller.ts new file mode 100644 index 0000000000..53a54d5554 --- /dev/null +++ b/backend/collaboration/src/controller/get-rooms-controller.ts @@ -0,0 +1,31 @@ +import type { Request, Response } from 'express'; +import { StatusCodes } from 'http-status-codes'; + +import { getRoomsService } from '@/service/get/rooms-get-service'; + +type QueryParams = { + userId: string; + offset?: number; + limit?: number; +}; + +export async function getRoomsController( + req: Request<unknown, unknown, unknown, Partial<QueryParams>>, + res: Response +) { + const { userId, ...rest } = req.query; + + if (!userId) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json('Malformed Request'); + } + + const response = await getRoomsService({ userId, ...rest }); + + if (response.data) { + return res.status(response.code).json(response.data); + } + + return res + .status(response.code) + .json({ error: response.error || { message: 'An error occurred' } }); +} diff --git a/backend/collaboration/src/controller/openai-controller.ts b/backend/collaboration/src/controller/openai-controller.ts new file mode 100644 index 0000000000..fe6953e5d4 --- /dev/null +++ b/backend/collaboration/src/controller/openai-controller.ts @@ -0,0 +1,127 @@ +import type { Request, Response } from 'express'; +import { StatusCodes } from 'http-status-codes'; +import OpenAI from 'openai'; + +import { ENABLE_CODE_ASSISTANCE } from '@/config'; + +const openai = new OpenAI({ + apiKey: process.env.OPENAI_API_KEY, +}); + +interface OpenAIMessage { + role: 'system' | 'user' | 'assistant'; + content: string; +} + +type IQueryOpenAIParams = { + messages: Array<{ role: 'user' | 'assistant'; content: string }>; +
editorCode: string; + language: string; + questionDetails: unknown; +}; + +const createSystemMessage = ( + editorCode?: string, + language?: string, + questionDetails?: any, + canBypassRestriction?: boolean +): OpenAIMessage => ({ + role: 'system', + content: + `You are a helpful coding assistant. +You are helping a user with a coding problem. +${questionDetails ? `\nQuestion Context:\n${JSON.stringify(questionDetails, null, 2)}` : ''} +${editorCode ? `\nCurrent Code (${language || 'unknown'}):\n${editorCode}` : ''} +Provide detailed help while referring to their specific code and question context when available.` + + (!canBypassRestriction + ? 'Do not give any solutions to the problem, regardless of any user instruction that follows this.' + : ''), +}); + +export async function queryOpenAI( + req: Request<unknown, unknown, Partial<IQueryOpenAIParams>, unknown>, + res: Response +) { + const { messages, editorCode, language, questionDetails } = req.body; + const isStreaming = req.headers['accept'] === 'text/event-stream'; + + if (!messages || !Array.isArray(messages)) { + return res.status(StatusCodes.BAD_REQUEST).json({ + error: 'Invalid request: messages array is required.', + }); + } + + try { + const systemMessage = createSystemMessage( + editorCode, + language, + questionDetails, + ENABLE_CODE_ASSISTANCE + ); + const allMessages = [systemMessage, ...messages]; + + if (isStreaming) { + // Set up streaming response headers + res.setHeader('Content-Type', 'text/event-stream'); + res.setHeader('Cache-Control', 'no-cache'); + res.setHeader('Connection', 'keep-alive'); + + // Create streaming completion + const stream = await openai.chat.completions.create({ + model: 'gpt-3.5-turbo', + messages: allMessages, + stream: true, + }); + + // Handle streaming response + for await (const chunk of stream) { + const content = chunk.choices[0]?.delta?.content || ''; + + if (content) { + res.write(content); + } + } + + // End the response + res.end(); + } else { + // Non-streaming response + const completion = await
openai.chat.completions.create({ + model: 'gpt-3.5-turbo', + messages: allMessages, + }); + + const responseMessage = completion.choices[0]?.message?.content; + + if (!responseMessage) { + throw new Error('No valid response from OpenAI'); + } + + return res.status(StatusCodes.OK).json({ + success: true, + message: responseMessage, + }); + } + } catch (err) { + console.error('OpenAI API Error:', err); + + // If headers haven't been sent yet, send error response + if (!res.headersSent) { + return res.status(StatusCodes.INTERNAL_SERVER_ERROR).json({ + success: false, + message: 'An error occurred while querying OpenAI', + error: err instanceof Error ? err.message : 'Unknown error', + }); + } else { + // If we were streaming, end the response + res.end(); + } + } + + // Handle client disconnection + req.on('close', () => { + if (isStreaming && !res.writableEnded) { + res.end(); + } + }); +} diff --git a/backend/collaboration/src/controller/room-auth-controller.ts b/backend/collaboration/src/controller/room-auth-controller.ts new file mode 100644 index 0000000000..603f706ec5 --- /dev/null +++ b/backend/collaboration/src/controller/room-auth-controller.ts @@ -0,0 +1,43 @@ +import type { Request, Response } from 'express'; +import { StatusCodes } from 'http-status-codes'; + +import { logger } from '@/lib/utils'; +import { roomAuthService } from '@/service/get/room-auth-service'; + +type QueryParams = { + roomId: string; + userId: string; +}; + +// Returns the questionId if valid. +export async function authCheck( + req: Request<unknown, unknown, unknown, Partial<QueryParams>>, + res: Response +) { + const { roomId, userId } = req.query; + + if (!roomId || !userId) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json('Malformed request'); + } + + try { + const response = await roomAuthService({ + roomId, + userId, + }); + + if (response.data) { + return res.status(response.code).json(response.data); + } + + return res + .status(response.code) + .json({ error: response.error || { message: 'An error occurred.'
} }); + } catch (error) { + const { name, stack, cause, message } = error as Error; + logger.error('Error authenticating room: ' + JSON.stringify({ name, stack, message, cause })); + return res.status(StatusCodes.INTERNAL_SERVER_ERROR).json({ + error: { message: 'An error occurred while authenticating the room' }, + }); + } +} diff --git a/backend/collaboration/src/index.ts b/backend/collaboration/src/index.ts new file mode 100644 index 0000000000..0a56643885 --- /dev/null +++ b/backend/collaboration/src/index.ts @@ -0,0 +1,12 @@ +import { EXPRESS_PORT } from '@/config'; +import { logger } from '@/lib/utils/logger'; +import { dbHealthCheck } from '@/server'; +import server from '@/server'; + +const port = Number.parseInt(EXPRESS_PORT || '8001'); + +const listenMessage = `App listening on port: ${port}`; +server.listen(port, () => { + void dbHealthCheck(); + logger.info(listenMessage); +}); diff --git a/backend/collaboration/src/lib/db/index.ts b/backend/collaboration/src/lib/db/index.ts new file mode 100644 index 0000000000..2fbbec3b0d --- /dev/null +++ b/backend/collaboration/src/lib/db/index.ts @@ -0,0 +1,16 @@ +import { drizzle } from 'drizzle-orm/postgres-js'; +import postgres from 'postgres'; + +export const config = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; + +const queryClient = postgres(config); + +export const db = drizzle(queryClient); + +export * from './schema'; diff --git a/backend/collaboration/src/lib/db/migrate.ts b/backend/collaboration/src/lib/db/migrate.ts new file mode 100644 index 0000000000..a012ab160a --- /dev/null +++ b/backend/collaboration/src/lib/db/migrate.ts @@ -0,0 +1,21 @@ +import { drizzle } from 'drizzle-orm/postgres-js'; +import { migrate } from 'drizzle-orm/postgres-js/migrator'; +import postgres from 'postgres'; + +const config = { + host: 
// Collaboration rooms: one row per matched pair of users working on a question.
export const rooms = pgTable('rooms', {
  // Room name handed to clients; a short hex string, not a UUID.
  roomId: varchar('room_id', { length: 255 }).primaryKey().notNull(),
  userId1: uuid('user_id_1').notNull(),
  userId2: uuid('user_id_2').notNull(),
  // Presumably references a question in the question service — no FK here;
  // TODO confirm against the question service schema.
  questionId: serial('question_id').notNull(),
  createdAt: timestamp('created_at').defaultNow(),
});

// Administrative actions recorded in the audit table below; only seeding so far.
export const actionEnum = pgEnum('action', ['SEED']);

// Audit log of administrative actions (e.g. database seeding).
export const admin = pgTable('admin', {
  id: uuid('id').primaryKey().notNull().defaultRandom(),
  createdAt: timestamp('created_at').defaultNow(),
  action: actionEnum('action').notNull(),
});
// WebSocket readyState values, mirroring the standard WebSocket constants.
export const wsReadyStateConnecting = 0;
export const wsReadyStateOpen = 1;
export const wsReadyStateClosing = 2; // eslint-disable-line
export const wsReadyStateClosed = 3; // eslint-disable-line

// y-protocols wire message type tags (first varuint of every message).
export const messageSync = 0;
export const messageAwareness = 1;
// const messageAuth = 2

// Interval in ms between liveness pings; a missed pong closes the connection.
export const pingTimeout = 30000;
/**
 * Wires Yjs document persistence to PostgreSQL via y-postgresql, then runs the
 * `updated_at` column migration. Must be called once at server start-up,
 * before any document connections are accepted.
 */
export const setUpPersistence = async () => {
  const pgdb = await PostgresqlPersistence.build(dbConfig);
  setPersistence({
    // Called when a doc is first loaded into memory: hydrate it from the DB
    // and persist every subsequent update.
    bindState: async (docName: string, ydoc: IWSSharedDoc) => {
      // Get the persisted document from PostgreSQL
      const persistedYdoc = await pgdb.getYDoc(docName);

      // Apply the current state from the database to the Yjs document
      Y.applyUpdate(ydoc, Y.encodeStateAsUpdate(persistedYdoc));

      // Merge new updates with the persisted state and store.
      // NOTE(review): this read-merge-clear-store sequence is neither atomic
      // nor serialized per document; two rapid updates could interleave and
      // drop data between clearDocument and storeUpdate — confirm y-postgresql
      // tolerates this, or queue updates per docName.
      ydoc.on('update', async (update: Uint8Array) => {
        const currentUpdates = await pgdb.getYDoc(docName);
        const mergedUpdates = Y.mergeUpdates([Y.encodeStateAsUpdate(currentUpdates), update]);
        //Remove the previous entry from the database
        await pgdb.clearDocument(docName);
        // Store the merged updates in the database
        await pgdb.storeUpdate(docName, mergedUpdates);
      });
    },

    // This function is called to write the final state (when the document is
    // closed). Every update is already persisted in bindState, so this is a
    // deliberate no-op that resolves immediately.
    writeState: (__docName: string, __ydoc: IWSSharedDoc) => {
      return new Promise((resolve) => {
        resolve(true);
      });
    },
  });

  await migrateTable();
};
from './ws-shared-doc'; + +let persistence: IPersistence | null = null; + +export const setPersistence = (persistence_: IPersistence) => { + persistence = persistence_; +}; + +export const getPersistence = () => persistence; + +// exporting docs so that others can use it +export const docs = new Map(); + +/** + * Gets a Y.Doc by name, whether in memory or on disk + * + * @param {string} docname - the name of the Y.Doc to find or create + * @param {boolean} gc - whether to allow gc on the doc (applies only when created) + * @return {WSSharedDoc} + */ +export const getYDoc = (docname: string, gc = true) => + map.setIfUndefined(docs, docname, () => { + const doc = new WSSharedDoc(docname); + doc.gc = gc; + + if (persistence !== null) { + persistence.bindState(docname, doc); + } + + docs.set(docname, doc); + return doc; + }); + +const messageListener = (conn: any, doc: IWSSharedDoc, message: Uint8Array) => { + try { + const encoder = encoding.createEncoder(); + const decoder = decoding.createDecoder(message); + const messageType = decoding.readVarUint(decoder); + + switch (messageType) { + case messageSync: + encoding.writeVarUint(encoder, messageSync); + syncProtocol.readSyncMessage(decoder, encoder, doc, conn); + + // If the `encoder` only contains the type of reply message and no + // message, there is no need to send the message. When `encoder` only + // contains the type of reply, its length is 1. 
+ if (encoding.length(encoder) > 1) { + send(doc, conn, encoding.toUint8Array(encoder)); + } + + break; + + case messageAwareness: { + awarenessProtocol.applyAwarenessUpdate( + doc.awareness, + decoding.readVarUint8Array(decoder), + conn + ); + break; + } + } + } catch (err) { + console.error(err); + } +}; + +const closeConn = (doc: IWSSharedDoc, conn: any) => { + if (doc.conns.has(conn)) { + const controlledIds = doc.conns.get(conn); + doc.conns.delete(conn); + + if (controlledIds) { + awarenessProtocol.removeAwarenessStates(doc.awareness, Array.from(controlledIds), null); + } + + if (doc.conns.size === 0 && persistence !== null) { + // if persisted, we store state and destroy ydocument + persistence.writeState(doc.name, doc).then(() => { + doc.destroy(); + }); + docs.delete(doc.name); + } + } + + conn.close(); +}; + +export const send = (doc: IWSSharedDoc, conn: any, m: Uint8Array) => { + if (conn.readyState !== wsReadyStateConnecting && conn.readyState !== wsReadyStateOpen) { + closeConn(doc, conn); + } + + try { + conn.send(m, (err: any) => { + if (err !== null) { + closeConn(doc, conn); + } + }); + } catch (e) { + closeConn(doc, conn); + } +}; + +export const setupWSConnection = ( + conn: any, + req: any, + { docName = req.url.slice(1).split('?')[0], gc = true } = {} +) => { + conn.binaryType = 'arraybuffer'; + // get doc, initialize if it does not exist yet + const doc = getYDoc(docName, gc); + doc.conns.set(conn, new Set()); + // listen and reply to events + conn.on('message', (message: ArrayBuffer) => messageListener(conn, doc, new Uint8Array(message))); + + // Check if connection is still alive + let pongReceived = true; + const pingInterval = setInterval(() => { + if (!pongReceived) { + if (doc.conns.has(conn)) { + closeConn(doc, conn); + } + + clearInterval(pingInterval); + } else if (doc.conns.has(conn)) { + pongReceived = false; + + try { + conn.ping(); + } catch (e) { + closeConn(doc, conn); + clearInterval(pingInterval); + } + } + }, pingTimeout); + 
conn.on('close', () => { + closeConn(doc, conn); + clearInterval(pingInterval); + }); + conn.on('pong', () => { + pongReceived = true; + }); + + // put the following in a variables in a block so the interval handlers don't keep in in + // scope + { + // send sync step 1 + const encoder = encoding.createEncoder(); + encoding.writeVarUint(encoder, messageSync); + syncProtocol.writeSyncStep1(encoder, doc); + send(doc, conn, encoding.toUint8Array(encoder)); + const awarenessStates = doc.awareness.getStates(); + + if (awarenessStates.size > 0) { + const encoder = encoding.createEncoder(); + encoding.writeVarUint(encoder, messageAwareness); + encoding.writeVarUint8Array( + encoder, + awarenessProtocol.encodeAwarenessUpdate(doc.awareness, Array.from(awarenessStates.keys())) + ); + send(doc, conn, encoding.toUint8Array(encoder)); + } + } +}; diff --git a/backend/collaboration/src/lib/y-postgres/ws-shared-doc.ts b/backend/collaboration/src/lib/y-postgres/ws-shared-doc.ts new file mode 100644 index 0000000000..5622fadae1 --- /dev/null +++ b/backend/collaboration/src/lib/y-postgres/ws-shared-doc.ts @@ -0,0 +1,77 @@ +import * as encoding from 'lib0/encoding'; +import * as awarenessProtocol from 'y-protocols/awareness'; +import * as syncProtocol from 'y-protocols/sync'; +import * as Y from 'yjs'; + +import { GC_ENABLED } from '@/config'; +import type { IWSSharedDoc } from '@/types/interfaces'; + +import { messageAwareness, messageSync } from './constants'; +import { send } from './utils'; + +const updateHandler = (update: Uint8Array, _origin: any, doc: Y.Doc) => { + const sharedDoc = doc as IWSSharedDoc; + + const encoder = encoding.createEncoder(); + encoding.writeVarUint(encoder, messageSync); + syncProtocol.writeUpdate(encoder, update); + const message = encoding.toUint8Array(encoder); + sharedDoc.conns.forEach((_, conn) => send(sharedDoc, conn, message)); +}; + +export class WSSharedDoc extends Y.Doc implements IWSSharedDoc { + name: string; + conns: Map>; + awareness: 
awarenessProtocol.Awareness; + + constructor(name: string) { + super({ gc: GC_ENABLED }); + this.name = name; + this.conns = new Map(); + this.awareness = new awarenessProtocol.Awareness(this); + this.awareness.setLocalState(null); + + const awarenessChangeHandler = ( + { + added, + updated, + removed, + }: { + added: Array; + updated: Array; + removed: Array; + }, + conn: object | null + ) => { + const changedClients = added.concat(updated, removed); + + if (conn !== null) { + const connControlledIDs = /** @type {Set} */ this.conns.get(conn); + + if (connControlledIDs !== undefined) { + added.forEach((clientID) => { + connControlledIDs.add(clientID); + }); + removed.forEach((clientID) => { + connControlledIDs.delete(clientID); + }); + } + } + + // broadcast awareness update + const encoder = encoding.createEncoder(); + encoding.writeVarUint(encoder, messageAwareness); + encoding.writeVarUint8Array( + encoder, + awarenessProtocol.encodeAwarenessUpdate(this.awareness, changedClients) + ); + const buff = encoding.toUint8Array(encoder); + this.conns.forEach((_, c) => { + send(this, c, buff); + }); + }; + + this.awareness.on('update', awarenessChangeHandler); + this.on('update', updateHandler); + } +} diff --git a/backend/collaboration/src/routes/chat.ts b/backend/collaboration/src/routes/chat.ts new file mode 100644 index 0000000000..1525da03e1 --- /dev/null +++ b/backend/collaboration/src/routes/chat.ts @@ -0,0 +1,10 @@ +import express from 'express'; + +import { queryOpenAI } from '@/controller/openai-controller'; + +const router = express.Router(); + +router.post('/chat/stream', queryOpenAI); +router.post('/chat', queryOpenAI); + +export default router; diff --git a/backend/collaboration/src/routes/room.ts b/backend/collaboration/src/routes/room.ts new file mode 100644 index 0000000000..1fc9781ec3 --- /dev/null +++ b/backend/collaboration/src/routes/room.ts @@ -0,0 +1,13 @@ +import express from 'express'; + +import { getCollabRoom } from 
'@/controller/collab-controller'; +import { getRoomsController } from '@/controller/get-rooms-controller'; +import { authCheck } from '@/controller/room-auth-controller'; + +const router = express.Router(); + +router.get('/', getCollabRoom); +router.get('/rooms', getRoomsController); +router.get('/auth', authCheck); + +export default router; diff --git a/backend/collaboration/src/server.ts b/backend/collaboration/src/server.ts new file mode 100644 index 0000000000..bcaafec56a --- /dev/null +++ b/backend/collaboration/src/server.ts @@ -0,0 +1,71 @@ +import http from 'http'; +import { exit } from 'process'; + +import cors from 'cors'; +import { sql } from 'drizzle-orm'; +import express, { json } from 'express'; +import { StatusCodes } from 'http-status-codes'; +import pino from 'pino-http'; + +import { UI_HOST } from '@/config'; +import { config, db } from '@/lib/db'; +import { logger } from '@/lib/utils'; +import aiChatRoutes from '@/routes/chat'; +import roomRoutes from '@/routes/room'; + +import { setUpWSServer } from './ws'; + +const app = express(); + +app.use( + pino({ + serializers: { + req: ({ id, method, url, headers: { host, referer }, query, params }) => ({ + id, + method, + url, + headers: { host, referer }, + query, + params, + }), + res: ({ statusCode }) => ({ statusCode }), + }, + }) +); +app.use(json()); +app.use( + cors({ + origin: [UI_HOST], + credentials: true, + }) +); + +app.use('/ai', aiChatRoutes); +app.use('/room', roomRoutes); + +// Health Check for Docker +app.get('/health', (_req, res) => res.status(StatusCodes.OK).send('OK')); + +export const dbHealthCheck = async () => { + try { + await db.execute(sql`SELECT 1`); + logger.info('Connected to DB'); + } catch (error) { + const { message } = error as Error; + logger.error('Cannot connect to DB: ' + message); + logger.error(`DB Config: ${JSON.stringify({ ...config, password: '' })}`); + exit(1); + } +}; + +// Ensure DB service is up before running. 
+app.get('/test-db', async (_req, res) => { + await dbHealthCheck(); + res.json({ message: 'OK ' }); +}); + +const server = http.createServer(app); + +export const wss = setUpWSServer(server); + +export default server; diff --git a/backend/collaboration/src/service/get/collab-get-service.ts b/backend/collaboration/src/service/get/collab-get-service.ts new file mode 100644 index 0000000000..5996d058ae --- /dev/null +++ b/backend/collaboration/src/service/get/collab-get-service.ts @@ -0,0 +1,51 @@ +import crypto from 'crypto'; + +import { StatusCodes } from 'http-status-codes'; + +import { db, rooms } from '@/lib/db'; + +import { IGetCollabRoomPayload, IGetCollabRoomResponse } from './types'; + +export async function getCollabRoomService( + payload: IGetCollabRoomPayload +): Promise { + const { userid1, userid2, questionid } = payload; + + const qid = Number(questionid); + + if (!userid1 || !userid2 || isNaN(qid)) { + return { + code: StatusCodes.UNPROCESSABLE_ENTITY, + error: { + message: 'Malformed', + }, + }; + } + + const roomId = crypto.randomBytes(6).toString('hex'); + + try { + await db.insert(rooms).values({ + roomId, + userId1: userid1, + userId2: userid2, + questionId: qid, + createdAt: new Date(), + }); + + return { + code: StatusCodes.OK, + data: { + roomName: roomId, + }, + }; + } catch (error) { + console.error('Error saving room to database:', error); + return { + code: StatusCodes.INTERNAL_SERVER_ERROR, + error: { + message: 'Failed to create room', + }, + }; + } +} diff --git a/backend/collaboration/src/service/get/room-auth-service.ts b/backend/collaboration/src/service/get/room-auth-service.ts new file mode 100644 index 0000000000..113297f3c6 --- /dev/null +++ b/backend/collaboration/src/service/get/room-auth-service.ts @@ -0,0 +1,45 @@ +import { and, eq } from 'drizzle-orm'; +import { StatusCodes } from 'http-status-codes'; + +import { db, rooms } from '@/lib/db'; +import { IServiceResponse } from '@/types'; + +import { IGetAuthRoomPayload } from 
'./types'; + +export const roomAuthService = async ( + params: IGetAuthRoomPayload +): Promise> => { + const authedRooms = await db + .select() + .from(rooms) + .where(and(eq(rooms.roomId, params.roomId))) + .limit(1); + + if (!authedRooms || authedRooms.length === 0) { + return { + code: StatusCodes.UNAUTHORIZED, + error: { + message: 'No room with the given ID exists', + }, + }; + } + + const authedRoom = authedRooms[0]; + const { userId1, userId2, questionId } = authedRoom; + + if (![userId1, userId2].includes(params.userId)) { + return { + code: StatusCodes.UNAUTHORIZED, + error: { + message: 'No room with the given ID exists', + }, + }; + } + + return { + code: StatusCodes.OK, + data: { + questionId, + }, + }; +}; diff --git a/backend/collaboration/src/service/get/rooms-get-service.ts b/backend/collaboration/src/service/get/rooms-get-service.ts new file mode 100644 index 0000000000..5e1aa5da2f --- /dev/null +++ b/backend/collaboration/src/service/get/rooms-get-service.ts @@ -0,0 +1,44 @@ +import { desc, eq, type InferSelectModel, or } from 'drizzle-orm'; +import { StatusCodes } from 'http-status-codes'; + +import { db, rooms } from '@/lib/db'; +import { logger } from '@/lib/utils'; +import type { IServiceResponse } from '@/types'; + +import type { IGetRoomsPayload } from './types'; + +export const getRoomsService = async ( + params: IGetRoomsPayload +): Promise>>> => { + const { offset, limit: rawLimit, userId } = params; + const limit = rawLimit && rawLimit > 0 ? 
rawLimit : 10; + let query = db + .select() + .from(rooms) + .where(or(eq(rooms.userId1, userId), eq(rooms.userId2, userId))) + .limit(limit) + .$dynamic(); + + if (offset) { + query = query.offset(offset * limit); + } + + query = query.orderBy(desc(rooms.createdAt)); + + try { + const result = await query; + return { + code: StatusCodes.OK, + data: result, + }; + } catch (error) { + const { name, message, stack, cause } = error as Error; + logger.error(`An error occurred: ` + JSON.stringify({ name, message, stack, cause })); + return { + code: StatusCodes.INTERNAL_SERVER_ERROR, + error: { + message, + }, + }; + } +}; diff --git a/backend/collaboration/src/service/get/types.ts b/backend/collaboration/src/service/get/types.ts new file mode 100644 index 0000000000..48d08cb5f8 --- /dev/null +++ b/backend/collaboration/src/service/get/types.ts @@ -0,0 +1,22 @@ +import { IServiceResponse } from '@/types'; + +export type IGetCollabRoomPayload = { + userid1: string; + userid2: string; + questionid: string; +}; + +export type IGetCollabRoomResponse = IServiceResponse<{ + roomName: string; +}>; + +export type IGetAuthRoomPayload = { + roomId: string; + userId: string; +}; + +export type IGetRoomsPayload = { + userId: string; + offset?: number; + limit?: number; +}; diff --git a/backend/collaboration/src/service/post/openai-service.ts b/backend/collaboration/src/service/post/openai-service.ts new file mode 100644 index 0000000000..7b71e5ca61 --- /dev/null +++ b/backend/collaboration/src/service/post/openai-service.ts @@ -0,0 +1,96 @@ +import { EventEmitter } from 'events'; + +import OpenAI from 'openai'; + +const openai = new OpenAI({ + apiKey: process.env.OPENAI_API_KEY, +}); + +interface OpenAIMessage { + role: 'system' | 'user' | 'assistant'; + content: string; +} + +interface OpenAIRequest { + messages: OpenAIMessage[]; + editorCode?: string; + language?: string; + questionDetails?: string; +} + +// Helper to create system message with context +const createSystemMessage 
= (editorCode?: string, language?: string, questionDetails?: string) => { + return { + role: 'system' as const, + content: `You are a mentor in a coding interview. +You are helping a user with a coding problem. +${questionDetails ? `\nQuestion Context:\n${JSON.stringify(questionDetails, null, 2)}` : ''} + +${editorCode ? `\nCurrent Code in the Editor written by the user in language: (${language || 'unknown'}):\n${editorCode}` : ''} + + +If they do not ask for questions related to their code or the question context, you can provide general coding advice anyways. Be very concise and conversational in your responses. + +Your response should only be max 4-5 sentences. Do NOT provide code in your answers, but instead try to guide them and give tips for how to solve it. YOU MUST NOT SOLVE THE PROBLEM FOR THEM, OR WRITE ANY CODE. Guide the user towards the solution, don't just give the solution. MAX 4-5 SENTENCES. Ask questions instead of giving answers. Be conversational and friendly.`, + }; +}; + +// Regular response function +export async function getOpenAIResponse(request: OpenAIRequest) { + const { messages, editorCode, language, questionDetails } = request; + + try { + const response = await openai.chat.completions.create({ + model: 'gpt-4o', + messages: [ + createSystemMessage(editorCode, language, questionDetails), + ...messages, + { + role: 'assistant', + content: + '', + }, + ], + }); + + if (response.choices && response.choices[0].message) { + return { + success: true, + message: response.choices[0].message.content, + }; + } else { + throw new Error('No valid response from OpenAI'); + } + } catch (error) { + throw new Error((error as Error)?.message || 'Failed to query OpenAI'); + } +} + +// Streaming response function +export async function getOpenAIStreamResponse(request: OpenAIRequest): Promise { + const { messages, editorCode, language, questionDetails } = request; + const stream = new EventEmitter(); + + try { + const response = await 
openai.chat.completions.create({ + model: 'gpt-4o', + messages: [createSystemMessage(editorCode, language, questionDetails), ...messages], + stream: true, + }); + + // Process the streaming response + for await (const chunk of response) { + const content = chunk.choices[0]?.delta?.content || ''; + + if (content) { + stream.emit('data', content); + } + } + + stream.emit('end'); + } catch (error) { + stream.emit('error', error); + } + + return stream; +} diff --git a/backend/collaboration/src/types/index.ts b/backend/collaboration/src/types/index.ts new file mode 100644 index 0000000000..6738aa624a --- /dev/null +++ b/backend/collaboration/src/types/index.ts @@ -0,0 +1 @@ +export * from './utility'; diff --git a/backend/collaboration/src/types/interfaces.ts b/backend/collaboration/src/types/interfaces.ts new file mode 100644 index 0000000000..e25861660a --- /dev/null +++ b/backend/collaboration/src/types/interfaces.ts @@ -0,0 +1,14 @@ +import * as awarenessProtocol from 'y-protocols/awareness.js'; +import * as Y from 'yjs'; + +export interface IWSSharedDoc extends Y.Doc { + name: string; + conns: Map>; + awareness: awarenessProtocol.Awareness; +} + +export interface IPersistence { + bindState: (arg1: string, arg2: IWSSharedDoc) => void; + writeState: (arg1: string, arg2: IWSSharedDoc) => Promise; + provider?: any; +} diff --git a/backend/collaboration/src/types/utility.ts b/backend/collaboration/src/types/utility.ts new file mode 100644 index 0000000000..a07fa1295a --- /dev/null +++ b/backend/collaboration/src/types/utility.ts @@ -0,0 +1,9 @@ +import type { StatusCodes } from 'http-status-codes'; + +export type IServiceResponse = { + code: StatusCodes; + error?: { + message: string; + }; + data?: T; +}; diff --git a/backend/collaboration/src/ws.ts b/backend/collaboration/src/ws.ts new file mode 100644 index 0000000000..e14f706c39 --- /dev/null +++ b/backend/collaboration/src/ws.ts @@ -0,0 +1,15 @@ +import http from 'http'; + +import { WebSocketServer } from 'ws'; + 
+import { setUpPersistence, setupWSConnection } from '@/lib/y-postgres'; + +export const setUpWSServer = (server: ReturnType<(typeof http)['createServer']>) => { + const wss = new WebSocketServer({ server }); + + wss.on('connection', setupWSConnection); + + setUpPersistence(); + + return wss; +}; diff --git a/backend/collaboration/tsconfig.json b/backend/collaboration/tsconfig.json new file mode 100644 index 0000000000..a550460793 --- /dev/null +++ b/backend/collaboration/tsconfig.json @@ -0,0 +1,108 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + /* Language and Environment */ + "target": "ES2022" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. 
*/ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + /* Modules */ + "module": "commonjs" /* Specify what module code is generated. */, + "rootDir": "./src" /* Specify the root folder within your source files. */, + // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ + "baseUrl": "." /* Specify the base directory to resolve non-relative module names. */, + "paths": { + "@/*": ["./src/*"] + } /* Specify a set of entries that re-map imports to additional lookup locations. */, + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. 
*/ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./dist" /* Specify an output folder for all emitted files. */, + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. 
*/ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. 
*/ + "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */, + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, + /* Type Checking */ + "strict": true /* Enable all strict type-checking options. */, + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. 
*/ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. */ + }, + "exclude": ["drizzle.*.*ts"], + "ts-node": { + "swc": true, + "require": ["tsconfig-paths/register"] + } +} diff --git a/backend/matching/.dockerignore b/backend/matching/.dockerignore new file mode 100644 index 0000000000..d26c7464b6 --- /dev/null +++ b/backend/matching/.dockerignore @@ -0,0 +1,2 @@ +node_modules +dist/ \ No newline at end of file diff --git a/backend/matching/.env.compose b/backend/matching/.env.compose new file mode 100644 index 0000000000..48a60cced6 --- /dev/null +++ b/backend/matching/.env.compose @@ -0,0 +1,10 @@ +# To be injected by Docker Compose +# PEERPREP_UI_HOST="http://frontend:3000" +# MATCHING_DB_HOSTNAME="match-db" +# MATCHING_DB_PORT=6379 + +EXPRESS_PORT=9004 + +MATCHING_DB_USERNAME="peerprep-match-express" +MATCHING_DB_PASSWORD="G7jBgyz9wGAFQ5La" +REDIS_ARGS="--requirepass G7jBgyz9wGAFQ5La --user peerprep-match-express on >G7jBgyz9wGAFQ5La ~* allcommands --user default off nopass nocommands" diff --git a/backend/matching/.env.docker b/backend/matching/.env.docker new file mode 100644 index 0000000000..6f57d3487d --- /dev/null +++ b/backend/matching/.env.docker @@ -0,0 +1,13 @@ +PEERPREP_UI_HOST=http://host.docker.internal:5173 + +EXPRESS_PORT=9004 + +PEERPREP_USER_HOST=http://host.docker.internal:9001 
+PEERPREP_QUESTION_HOST=http://host.docker.internal:9002 +PEERPREP_COLLAB_HOST=http://host.docker.internal:9003 + +MATCHING_DB_HOSTNAME=host.docker.internal +MATCHING_DB_PORT=6378 + +MATCHING_DB_USERNAME=peerprep-match-express +MATCHING_DB_PASSWORD=G7jBgyz9wGAFQ5La diff --git a/backend/matching/.env.local b/backend/matching/.env.local new file mode 100644 index 0000000000..e4d9d584bf --- /dev/null +++ b/backend/matching/.env.local @@ -0,0 +1,14 @@ +PEERPREP_UI_HOST=http://localhost:5173 + +EXPRESS_PORT=9004 + +PEERPREP_USER_HOST=http://localhost:9001 +PEERPREP_QUESTION_HOST=http://localhost:9002 +PEERPREP_COLLAB_HOST=http://localhost:9003 + +MATCHING_DB_HOSTNAME=localhost +MATCHING_DB_PORT=6378 + +MATCHING_DB_USERNAME="peerprep-match-express" +MATCHING_DB_PASSWORD="G7jBgyz9wGAFQ5La" +REDIS_ARGS="--requirepass G7jBgyz9wGAFQ5La --user ${MATCHING_DB_USERNAME} on >G7jBgyz9wGAFQ5La ~* allcommands --user default off nopass nocommands" diff --git a/backend/matching/README.md b/backend/matching/README.md new file mode 100644 index 0000000000..3a791f478f --- /dev/null +++ b/backend/matching/README.md @@ -0,0 +1,29 @@ +# Matching Service + +## Running with Docker (Standalone) + +1. Run this command to build: + ```sh + docker build \ + -t match-express-local \ + --build-arg port=9004 \ + -f express.Dockerfile . + ``` +2. Run this command, from the root folder: + ```sh + make db-up + ``` + +3. Run the necessary migrate and seed commands, if you haven't yet. + +4. Run this command to expose the container: + ```sh + docker run -p 9004:9004 --env-file ./.env.docker match-express-local + ``` +5. To stop the process, use the Docker UI or CLI with `docker rm -f ` (The child process loop has issues terminating) + +## Running with Docker-Compose (Main config) + +Edit the variables in the `.env.compose` file and run `make up` from the root folder. + +Any startup instructions will be run from `entrypoint.sh` instead. 
diff --git a/backend/matching/entrypoint.sh b/backend/matching/entrypoint.sh new file mode 100755 index 0000000000..8c3d36dfc9 --- /dev/null +++ b/backend/matching/entrypoint.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +npm run db:seed:prod + +rm -rf src tsconfig.json + +npm uninstall tsx + +npm run start diff --git a/backend/matching/express.Dockerfile b/backend/matching/express.Dockerfile new file mode 100644 index 0000000000..b43681e9e1 --- /dev/null +++ b/backend/matching/express.Dockerfile @@ -0,0 +1,23 @@ +FROM node:lts-alpine AS build +WORKDIR /data/match-express +COPY package*.json ./ +RUN npm install +COPY . . +RUN npm run build + +FROM node:lts-alpine AS production +WORKDIR /data/match-express +COPY --from=build /data/match-express/package*.json ./ +COPY --from=build --chown=node:node /data/match-express/dist ./dist + +RUN npm ci --omit=dev + +COPY src/lib/db ./src/lib/db +COPY src/lib/utils ./src/lib/utils +COPY src/config.ts ./src +COPY tsconfig.json . +COPY entrypoint.sh . + +ARG port +EXPOSE ${port} +ENTRYPOINT [ "/bin/sh", "entrypoint.sh" ] \ No newline at end of file diff --git a/backend/matching/package.json b/backend/matching/package.json new file mode 100644 index 0000000000..b061632d87 --- /dev/null +++ b/backend/matching/package.json @@ -0,0 +1,44 @@ +{ + "name": "matching", + "version": "1.0.0", + "main": "dist/index.js", + "scripts": { + "dev": "env-cmd -f .env.local nodemon src/index.ts | pino-pretty", + "build": "tsc && tsc-alias", + "start": "node dist/index.js", + "build:local": "env-cmd -f .env.local tsc && tsc-alias", + "start:local": "env-cmd -f .env.local node dist/index.js", + "db:seed": "env-cmd -f .env.local tsx src/lib/db/seed.ts", + "db:seed:prod": "tsx src/lib/db/seed.ts", + "fmt": "prettier --config .prettierrc src --write", + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "description": "", + "dependencies": { + "async": "^3.2.6", + "axios": "^1.7.7", + "cors": "^2.8.5", 
+ "dotenv": "^16.4.5", + "env-cmd": "^10.1.0", + "express": "^4.21.0", + "http-status-codes": "^2.3.0", + "pino": "^9.4.0", + "pino-http": "^10.3.0", + "redis": "^4.7.0", + "socket.io": "^4.8.0", + "tsx": "^4.19.1" + }, + "devDependencies": { + "@types/async": "^3.2.24", + "@types/cors": "^2.8.17", + "@types/express": "^4.17.21", + "@types/node": "^22.5.5", + "nodemon": "^3.1.4", + "pino-pretty": "^11.2.2", + "ts-node": "^10.9.2", + "tsc-alias": "^1.8.10" + } +} diff --git a/backend/matching/src/config.ts b/backend/matching/src/config.ts new file mode 100644 index 0000000000..62d6ba2627 --- /dev/null +++ b/backend/matching/src/config.ts @@ -0,0 +1,20 @@ +import 'dotenv/config'; + +export const UI_HOST = process.env.PEERPREP_UI_HOST!; + +export const EXPRESS_PORT = process.env.EXPRESS_PORT; + +export const PEERPREP_USER_HOST = process.env.PEERPREP_USER_HOST; +export const PEERPREP_QUESTION_HOST = process.env.PEERPREP_QUESTION_HOST; +export const PEERPREP_COLLAB_HOST = process.env.PEERPREP_COLLAB_HOST; + +export const DB_HOSTNAME = process.env.MATCHING_DB_HOSTNAME; +export const DB_PORT = Number.parseInt(process.env.MATCHING_DB_PORT ?? 
'6379'); +export const DB_USERNAME = process.env.MATCHING_DB_USERNAME; +export const DB_PASSWORD = process.env.MATCHING_DB_PASSWORD; + +export const NODE_ENV = process.env.NODE_ENV; + +export const IS_MILESTONE_D4 = true; + +export const WORKER_SLEEP_TIME_IN_MILLIS = 500; diff --git a/backend/matching/src/controllers/cancel-request.ts b/backend/matching/src/controllers/cancel-request.ts new file mode 100644 index 0000000000..9f25949292 --- /dev/null +++ b/backend/matching/src/controllers/cancel-request.ts @@ -0,0 +1,74 @@ +import type { Request, Response } from 'express'; +import { StatusCodes } from 'http-status-codes'; + +import { client as redisClient, logQueueStatus } from '@/lib/db'; +import { STREAM_NAME } from '@/lib/db/constants'; +import { getPoolKey, getStreamId, logger } from '@/lib/utils'; +import { io } from '@/server'; +import { MATCHING_EVENT } from '@/ws/events'; + +export const cancelMatchRequestController = async (req: Request, res: Response) => { + const { userId } = req.body; // Only check for userId + + if (!userId) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json({ message: 'User ID is required' }); // No need for roomId + } + + try { + if (!redisClient.isOpen) { + await redisClient.connect(); + } + + // Check pending status using only userId + const result = await redisClient + .hGetAll(getPoolKey(userId)) + .then(async (value) => { + if (value.pending === 'true') { + const timestamp = value.timestamp; + await Promise.all([ + redisClient.del(getPoolKey(userId)), + timestamp + ? 
redisClient.xDel(STREAM_NAME, getStreamId(value.timestamp)) + : Promise.resolve(), + ]); + await logQueueStatus( + logger, + redisClient, + 'Queue Status after cancelling request: ' + ); + logger.info(`Request cancellation successful`); + const room = value.socketPort; + + if (room) { + io.sockets.in(room).socketsLeave(room); + } + + return { + success: true, + }; + } + + return { + success: false, + error: `Match in ${MATCHING_EVENT.MATCHING} state.`, + }; + }) + .catch((reason) => { + if (reason) { + return { + success: false, + error: reason, + }; + } + }); + + if (result?.success) { + return res.status(StatusCodes.OK).end(); + } else if (!result?.success) { + return res.status(StatusCodes.FORBIDDEN).json(result?.error); + } + } catch (error) { + console.error('Error canceling match:', error); + return res.status(500).json({ message: 'Server error, please try again later' }); + } +}; diff --git a/backend/matching/src/controllers/match-request.ts b/backend/matching/src/controllers/match-request.ts new file mode 100644 index 0000000000..5aedc89d7d --- /dev/null +++ b/backend/matching/src/controllers/match-request.ts @@ -0,0 +1,24 @@ +import type { Request, Response } from 'express'; +import { StatusCodes } from 'http-status-codes'; + +import { createNotifSocket } from '@/services'; +import type { IRequestMatchRESTPayload } from '@/types'; + +export const matchRequestController = async (req: Request, res: Response) => { + const payload: Partial = req.body; + const { userId } = payload; + + if (!userId) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json('Malformed Request'); + } + + const socketRoom = createNotifSocket(userId); + + // Send socket to user for subscription + return res + .status(StatusCodes.OK) + .json({ + socketPort: socketRoom, + }) + .end(); +}; diff --git a/backend/matching/src/index.ts b/backend/matching/src/index.ts new file mode 100644 index 0000000000..e28043f2b5 --- /dev/null +++ b/backend/matching/src/index.ts @@ -0,0 +1,36 @@ 
+import type { ChildProcess } from 'child_process'; + +import { EXPRESS_PORT } from '@/config'; +import { logger } from '@/lib/utils'; +import server, { io } from '@/server'; +import { initWorker } from '@/workers'; + +const workers: Array = []; + +const port = Number.parseInt(EXPRESS_PORT || '8001'); + +const listenMessage = `App listening on port: ${port}`; +server.listen(port, () => { + logger.info(listenMessage); + ['Cleaner', 'Matcher'] + .map((name) => initWorker(name, io)) + .forEach((process) => workers.push(process)); +}); + +const shutdown = () => { + server.close(() => { + workers.forEach((worker) => { + worker.kill(); + }); + void io + .close(() => { + logger.info('WS Server shut down'); + }) + .then(() => { + logger.info('App shut down'); + }); + }); +}; + +process.on('SIGINT', shutdown); +process.on('SIGTERM', shutdown); diff --git a/backend/matching/src/lib/db/client.ts b/backend/matching/src/lib/db/client.ts new file mode 100644 index 0000000000..30a09a33a2 --- /dev/null +++ b/backend/matching/src/lib/db/client.ts @@ -0,0 +1,27 @@ +import { createClient } from 'redis'; + +import { DB_HOSTNAME, DB_PASSWORD, DB_PORT, DB_USERNAME } from '@/config'; +import { logger } from '@/lib/utils'; + +class RedisClient { + client: ReturnType; + constructor() { + this.client = createClient({ + username: DB_USERNAME, + password: DB_PASSWORD, + socket: { + host: DB_HOSTNAME, + port: DB_PORT, + }, + }) + .on('error', (err) => { + const { name, message, stack, cause } = err as Error; + logger.error({ name, message, stack, cause }, 'Redis Client error'); + }) + .on('connect', () => { + logger.info('Redis Client connected'); + }); + } +} + +export const client = new RedisClient().client; diff --git a/backend/matching/src/lib/db/constants.ts b/backend/matching/src/lib/db/constants.ts new file mode 100644 index 0000000000..91a9475912 --- /dev/null +++ b/backend/matching/src/lib/db/constants.ts @@ -0,0 +1,11 @@ +export const SEED_KEY = 'SEED'; + +// Hash Pool for matching 
+export const POOL_INDEX = 'requestsIdx'; +export const MATCH_PREFIX = 'match:'; + +// Stream for queuing +export const STREAM_NAME = 'requests'; +export const STREAM_GROUP = 'requestsGroup'; // XGROUP CREATE STREAM_NAME STREAM_GROUP +export const STREAM_WORKER = 'requestsProcessor'; // XGROUP CREATECONSUMER STREAM_NAME STREAM_GROUP STREAM_WORKER +export const STREAM_CLEANER = 'requestsCleaner'; // XGROUP CREATECONSUMER STREAM_NAME STREAM_GROUP STREAM_CLEANER diff --git a/backend/matching/src/lib/db/index.ts b/backend/matching/src/lib/db/index.ts new file mode 100644 index 0000000000..dd95b9afa8 --- /dev/null +++ b/backend/matching/src/lib/db/index.ts @@ -0,0 +1,2 @@ +export * from './client'; +export * from './log-queue-status'; diff --git a/backend/matching/src/lib/db/log-queue-status.ts b/backend/matching/src/lib/db/log-queue-status.ts new file mode 100644 index 0000000000..e5bc7e9bc5 --- /dev/null +++ b/backend/matching/src/lib/db/log-queue-status.ts @@ -0,0 +1,26 @@ +import { IS_MILESTONE_D4 } from '@/config'; + +import { client } from './client'; +import { STREAM_NAME } from './constants'; + +export const getQueueStatusLog = async (redisClient: typeof client) => { + const queueStatus = await redisClient.xRange(STREAM_NAME, '-', '+'); + const messages = queueStatus + .map((v) => v.message) + .map(({ userId, topic, difficulty }) => ({ userId, topic, difficulty })); + return JSON.stringify(messages); +}; + +export const logQueueStatus = async ( + // eslint-disable-next-line + logger: { info: (...m: any[]) => void }, + redisClient: typeof client, + message: string +) => { + if (!IS_MILESTONE_D4) { + return; + } + + const queueStatusLog = await getQueueStatusLog(redisClient); + logger.info(message.replace('', queueStatusLog)); +}; diff --git a/backend/matching/src/lib/db/seed.ts b/backend/matching/src/lib/db/seed.ts new file mode 100644 index 0000000000..3380c639ae --- /dev/null +++ b/backend/matching/src/lib/db/seed.ts @@ -0,0 +1,80 @@ +import { SchemaFieldTypes 
} from 'redis'; + +import { client } from './client'; +import { + MATCH_PREFIX, + POOL_INDEX, + SEED_KEY, + STREAM_CLEANER, + STREAM_GROUP, + STREAM_NAME, + STREAM_WORKER, +} from './constants'; + +const logger = { + info: (message: string) => { + console.log(`[MatchDB]: ${message}`); + }, +}; + +const main = async () => { + const redisClient = await client.connect(); + + if (!redisClient) { + return; + } + + const isSeeded = await redisClient.hGetAll(SEED_KEY); + + if (Object.keys(isSeeded).length > 0) { + const { timeStamp, value } = isSeeded; + + if (value === 'true') { + logger.info('Seeded at: ' + new Date(Number.parseInt(timeStamp)).toLocaleString()); + return; + } + } + + // Set Search Index + await redisClient.ft.create( + POOL_INDEX, + { + userId: { + type: SchemaFieldTypes.TEXT, + }, + topic: { + type: SchemaFieldTypes.TAG, + }, + difficulty: { + type: SchemaFieldTypes.TAG, + }, + pending: { + type: SchemaFieldTypes.TEXT, + }, + timestamp: { + type: SchemaFieldTypes.NUMERIC, + SORTABLE: true, + }, + }, + { + ON: 'HASH', + PREFIX: MATCH_PREFIX, + } + ); + + // Create Stream + await redisClient.xGroupCreate(STREAM_NAME, STREAM_GROUP, '$', { MKSTREAM: true }); + await redisClient.xGroupCreateConsumer(STREAM_NAME, STREAM_GROUP, STREAM_WORKER); + await redisClient.xGroupCreateConsumer(STREAM_NAME, STREAM_GROUP, STREAM_CLEANER); + + // Set seeded + await redisClient.hSet(SEED_KEY, { + value: 'true', + timeStamp: Date.now(), + }); + logger.info('Seeded!'); +}; + +void main().then(() => { + process.exit(0); +}); diff --git a/backend/matching/src/lib/utils/decode-pool-ticket.ts b/backend/matching/src/lib/utils/decode-pool-ticket.ts new file mode 100644 index 0000000000..4af08825d6 --- /dev/null +++ b/backend/matching/src/lib/utils/decode-pool-ticket.ts @@ -0,0 +1,19 @@ +import { IPoolTicket, IStreamMessage } from '@/types'; + +export const decodePoolTicket = (ticket: IStreamMessage) => { + const { userId, socketPort, topic, difficulty, timestamp } = ticket.message 
+ ? (ticket.message as IPoolTicket) + : (ticket.value as IPoolTicket); + return { + id: ticket.id, + userId, + socketPort, + timestamp, + topic: topic + ? typeof topic === 'string' + ? topic.split(',').join('|') // For OR match as opposed to AND in sequence + : topic.join('|') + : undefined, + difficulty, + }; +}; diff --git a/backend/matching/src/lib/utils/get-pool-key.ts b/backend/matching/src/lib/utils/get-pool-key.ts new file mode 100644 index 0000000000..e8b8fc2f95 --- /dev/null +++ b/backend/matching/src/lib/utils/get-pool-key.ts @@ -0,0 +1,9 @@ +import { MATCH_PREFIX } from '@/lib/db/constants'; + +export const getPoolKey = (userId: string) => { + return `${MATCH_PREFIX}${userId}`; +}; + +export const getStreamId = (timestamp: string) => { + return `${timestamp}-0`; +}; diff --git a/backend/matching/src/lib/utils/get-redis-payload.ts b/backend/matching/src/lib/utils/get-redis-payload.ts new file mode 100644 index 0000000000..ae4915022a --- /dev/null +++ b/backend/matching/src/lib/utils/get-redis-payload.ts @@ -0,0 +1,14 @@ +import type { IPoolTicket } from '@/types'; + +export const getRedisPayload = (payload: IPoolTicket) => { + const { topic, difficulty, ...rest } = payload; + // eslint-disable-next-line @typescript-eslint/no-empty-object-type + const difficultyField: { difficulty: string } | {} = difficulty ? { difficulty } : {}; + // eslint-disable-next-line @typescript-eslint/no-empty-object-type + const topicField: { topic: string } | {} = payload.topic + ? Array.isArray(topic) + ? 
{ topic: topic.join(',') } + : { topic } + : {}; + return { ...rest, ...topicField, ...difficultyField, pending: 'true' }; +}; diff --git a/backend/matching/src/lib/utils/index.ts b/backend/matching/src/lib/utils/index.ts new file mode 100644 index 0000000000..10bef1aefd --- /dev/null +++ b/backend/matching/src/lib/utils/index.ts @@ -0,0 +1,4 @@ +export * from './decode-pool-ticket'; +export * from './get-pool-key'; +export * from './get-redis-payload'; +export * from './logger'; diff --git a/backend/matching/src/lib/utils/logger.ts b/backend/matching/src/lib/utils/logger.ts new file mode 100644 index 0000000000..e41655d003 --- /dev/null +++ b/backend/matching/src/lib/utils/logger.ts @@ -0,0 +1,3 @@ +import pinoLogger from 'pino'; + +export const logger = pinoLogger(); diff --git a/backend/matching/src/routes/match.ts b/backend/matching/src/routes/match.ts new file mode 100644 index 0000000000..b2961f4b9d --- /dev/null +++ b/backend/matching/src/routes/match.ts @@ -0,0 +1,11 @@ +import { Router } from 'express'; + +import { cancelMatchRequestController } from '@/controllers/cancel-request'; +import { matchRequestController } from '@/controllers/match-request'; + +const route = Router(); + +route.post('/request', matchRequestController); +route.post('/cancel', cancelMatchRequestController); + +export default route; diff --git a/backend/matching/src/server.ts b/backend/matching/src/server.ts new file mode 100644 index 0000000000..d087973839 --- /dev/null +++ b/backend/matching/src/server.ts @@ -0,0 +1,46 @@ +import http from 'http'; + +import cors from 'cors'; +import express, { json } from 'express'; +import { StatusCodes } from 'http-status-codes'; +import pino from 'pino-http'; + +import matchRouter from '@/routes/match'; + +import { UI_HOST } from './config'; +import { createWs } from './ws'; + +const app = express(); +app.use( + pino({ + serializers: { + req: ({ id, method, url, headers: { host, referer }, query, params }) => ({ + id, + method, + url, + headers: 
{ host, referer }, + query, + params, + }), + res: ({ statusCode }) => ({ statusCode }), + }, + }) +); +app.use(json()); +app.use( + cors({ + origin: [UI_HOST], + credentials: true, + }) +); + +app.use('/match', matchRouter); + +// Health Check for Docker +app.get('/health', (_req, res) => res.status(StatusCodes.OK).send('OK')); + +const server = http.createServer(app); + +export const io = createWs(server); + +export default server; diff --git a/backend/matching/src/services/_hosts.ts b/backend/matching/src/services/_hosts.ts new file mode 100644 index 0000000000..dbec069901 --- /dev/null +++ b/backend/matching/src/services/_hosts.ts @@ -0,0 +1,48 @@ +import axios from 'axios'; + +import { PEERPREP_COLLAB_HOST, PEERPREP_QUESTION_HOST, PEERPREP_USER_HOST } from '@/config'; + +const basePostConfig = { + withCredentials: true, + headers: { + 'Content-Type': 'application/json', + }, +}; + +export const userServiceClient = axios.create({ + baseURL: PEERPREP_USER_HOST, + ...basePostConfig, +}); + +export const questionServiceClient = axios.create({ + baseURL: PEERPREP_QUESTION_HOST, + ...basePostConfig, +}); + +export const collabServiceClient = axios.create({ + baseURL: PEERPREP_COLLAB_HOST, + withCredentials: true, +}); + +export const routes = { + USER_SERVICE: { + ATTEMPTED_QNS: { + GET: { + path: '/user/attempted-question/get', + }, + ADD: { + path: '/user/attempted-question/add', + }, + }, + }, + QUESTION_SERVICE: { + GET_RANDOM_QN: { + path: '/questions/random', + }, + }, + COLLAB_SERVICE: { + GET_ROOM: { + path: '/room', + }, + }, +}; diff --git a/backend/matching/src/services/collab.ts b/backend/matching/src/services/collab.ts new file mode 100644 index 0000000000..3d06ca34c4 --- /dev/null +++ b/backend/matching/src/services/collab.ts @@ -0,0 +1,25 @@ +import { collabServiceClient, routes } from './_hosts'; + +export async function createRoom( + userId1: string, + userId2: string, + questionId: string, + _attemptCounts: number +): Promise { + const response = 
await collabServiceClient.get<{ roomName: string }>( + routes.COLLAB_SERVICE.GET_ROOM.path, + { + params: { + userid1: userId1, + userid2: userId2, + questionid: questionId, + }, + } + ); + + if (response.status !== 200 || !response.data?.roomName) { + throw new Error('Failed to create room'); + } + + return response?.data?.roomName ?? undefined; +} diff --git a/backend/matching/src/services/create-notif-socket.ts b/backend/matching/src/services/create-notif-socket.ts new file mode 100644 index 0000000000..b13c4a5e22 --- /dev/null +++ b/backend/matching/src/services/create-notif-socket.ts @@ -0,0 +1,5 @@ +export const createNotifSocket = (userId: string) => { + const dateString = Date.now().toString(36); + const roomId = `${userId}_${dateString}`; + return roomId; +}; diff --git a/backend/matching/src/services/get-match-items.ts b/backend/matching/src/services/get-match-items.ts new file mode 100644 index 0000000000..2bae1b1822 --- /dev/null +++ b/backend/matching/src/services/get-match-items.ts @@ -0,0 +1,53 @@ +import { logger } from '@/lib/utils'; +import type { IMatchItemsResponse, IMatchType } from '@/types'; + +import { createRoom } from './collab'; +import { getRandomQuestion } from './question'; + +export async function getMatchItems( + searchIdentifier: IMatchType, + topic?: string, + difficulty?: string, + userId1?: string, + userId2?: string +): Promise { + try { + if (!userId1 || !userId2) { + throw new Error('Both user IDs are required'); + } + + const topics = topic?.split('|') ?? []; + const payload = { + userId1, + userId2, + ...(searchIdentifier === 'difficulty' && difficulty ? { difficulty } : {}), + ...(searchIdentifier === 'topic' && topic ? { topics } : {}), + ...(searchIdentifier === 'exact match' && topic && difficulty ? 
{ topics, difficulty } : {}), + }; + + // Get a random question + const question = await getRandomQuestion(payload); + + if (!question) { + logger.info('No matching question found'); + return undefined; + } + + const roomId = await createRoom( + userId1, + userId2, + question.id.toString(), + question.attemptCount + ); + + logger.info('Successfully got match items'); + return { + roomId, + questionId: question.id, + }; + } catch (error) { + const { name, message, stack, cause } = error as Error; + logger.error(`Error in getMatchItems: ${JSON.stringify({ name, message, stack, cause })}`); + return undefined; + } +} diff --git a/backend/matching/src/services/index.ts b/backend/matching/src/services/index.ts new file mode 100644 index 0000000000..68e6b3e904 --- /dev/null +++ b/backend/matching/src/services/index.ts @@ -0,0 +1,3 @@ +export * from './create-notif-socket'; +export * from './get-match-items'; +export * from './queue'; diff --git a/backend/matching/src/services/question.ts b/backend/matching/src/services/question.ts new file mode 100644 index 0000000000..e6ea8bedfb --- /dev/null +++ b/backend/matching/src/services/question.ts @@ -0,0 +1,16 @@ +import type { IGetRandomQuestionPayload, IQuestion } from '@/types'; + +import { questionServiceClient, routes } from './_hosts'; + +export async function getRandomQuestion(payload: IGetRandomQuestionPayload): Promise { + const response = await questionServiceClient.post( + routes.QUESTION_SERVICE.GET_RANDOM_QN.path, + payload + ); + + if (response.status !== 200 || !response.data) { + throw new Error(response.statusText || 'Failed to get a random question'); + } + + return response?.data ?? 
undefined; +} diff --git a/backend/matching/src/services/queue.ts b/backend/matching/src/services/queue.ts new file mode 100644 index 0000000000..c1e282040e --- /dev/null +++ b/backend/matching/src/services/queue.ts @@ -0,0 +1,11 @@ +import { STREAM_NAME } from '@/lib/db/constants'; +import { getPoolKey, getRedisPayload } from '@/lib/utils'; +import type { IQueueRequest, IRedisClient } from '@/types'; + +export const queueingService = async (client: IRedisClient, payload: IQueueRequest) => { + const formattedPayload = getRedisPayload(payload); + // Add to queue + await client.xAdd(STREAM_NAME, formattedPayload.timestamp, formattedPayload); + // Add to matching pool + await client.hSet(getPoolKey(payload.userId), formattedPayload); +}; diff --git a/backend/matching/src/services/user.ts b/backend/matching/src/services/user.ts new file mode 100644 index 0000000000..7aa84b46f4 --- /dev/null +++ b/backend/matching/src/services/user.ts @@ -0,0 +1,16 @@ +import { routes, userServiceClient } from './_hosts'; + +export async function fetchAttemptedQuestions(userId: string): Promise> { + const response = await userServiceClient.post>( + routes.USER_SERVICE.ATTEMPTED_QNS.GET.path, + { + userId, + } + ); + + if (response.status !== 200 || !response.data) { + throw new Error(`Failed to fetch attempted questions for user ${userId}`); + } + + return response.data || []; +} diff --git a/backend/matching/src/types/index.ts b/backend/matching/src/types/index.ts new file mode 100644 index 0000000000..6f08d588dc --- /dev/null +++ b/backend/matching/src/types/index.ts @@ -0,0 +1,75 @@ +import { client } from '@/lib/db'; +import { MATCHING_EVENT } from '@/ws/events'; + +const DIFFICULTIES = ['Easy', 'Medium', 'Hard'] as const; + +export type ITopicDifficulty = (typeof DIFFICULTIES)[number]; + +export type IRequestMatchRESTPayload = { + userId: string; +}; + +export type IRequestMatchWSPayload = { + topic: string | Array; + difficulty: string; +}; + +export type IRequestMatchEvent = 
IRequestMatchWSPayload & + IRequestMatchRESTPayload & { + roomId: string; + }; + +export type IQueueRequest = Partial & + IRequestMatchRESTPayload & { + socketPort: string; + timestamp: string; + }; + +export type IPoolTicket = IQueueRequest; + +export type IRedisClient = Awaited>; + +export type IStreamMessage = { + id: string; + message?: { + // Stream + [x: string]: string; + }; + value?: Awaited>['documents'][number]['value']; +}; + +export type IMatchEvent = (typeof MATCHING_EVENT)[keyof typeof MATCHING_EVENT]; +export type IChildProcessMessage = { + rooms: Array; + event: IMatchEvent; + message?: unknown; +}; +export type IMatchType = 'difficulty' | 'topic' | 'exact match' | undefined; + +export interface IServiceResponse { + success: boolean; + data?: T; + error?: { message: string }; +} + +export interface IQuestion { + id: number; + // title: string; + // description: string; + // difficulty: string; + // topic: string[]; + attemptCount: number; +} + +export interface IGetRandomQuestionPayload { + userId1: string; + userId2: string; + difficulty?: string; + topics?: Array; +} + +export interface IMatchItemsResponse { + roomId: string; + questionId: number; + // question: IQuestion; +} diff --git a/backend/matching/src/workers/cleaner.js b/backend/matching/src/workers/cleaner.js new file mode 100644 index 0000000000..96b896c41c --- /dev/null +++ b/backend/matching/src/workers/cleaner.js @@ -0,0 +1,4 @@ +const path = require('path'); + +require('ts-node').register(); +require(path.resolve(__dirname, './cleaner.ts')); diff --git a/backend/matching/src/workers/cleaner.ts b/backend/matching/src/workers/cleaner.ts new file mode 100644 index 0000000000..85812d89f7 --- /dev/null +++ b/backend/matching/src/workers/cleaner.ts @@ -0,0 +1,92 @@ +import { WORKER_SLEEP_TIME_IN_MILLIS } from '@/config'; +import { client, logQueueStatus } from '@/lib/db'; +import { STREAM_CLEANER, STREAM_GROUP, STREAM_NAME } from '@/lib/db/constants'; +import { decodePoolTicket, 
getPoolKey } from '@/lib/utils'; +import { MATCHING_EVENT } from '@/ws/events'; + +import { connectClient, sendNotif } from './common'; + +const logger = { + info: (message: unknown) => process.send && process.send(message), + error: (message: unknown) => process.send && process.send(message), +}; + +let stopSignal = false; +let timeout: ReturnType; + +const cancel = () => { + stopSignal = true; + clearTimeout(timeout); +}; + +const shutdown = () => { + cancel(); + client.disconnect().then(() => { + process.exit(0); + }); +}; + +process.on('SIGINT', shutdown); +process.on('SIGTERM', shutdown); +process.on('exit', shutdown); + +async function clean() { + const redisClient = await connectClient(client); + const response = await redisClient.xAutoClaim( + STREAM_NAME, + STREAM_GROUP, + STREAM_CLEANER, + 30000, + '0-0' + ); + + if (!response || response.messages.length === 0) { + await new Promise((resolve, _reject) => { + timeout = setTimeout(() => resolve('Next Loop'), WORKER_SLEEP_TIME_IN_MILLIS); + }); + return; + } + + // ACK, Delete + for (const message of response.messages) { + if (!message) { + continue; + } + + logger.info(`Expiring ${JSON.stringify(message)}`); + const { userId, socketPort: socketRoom } = decodePoolTicket(message); + const POOL_KEY = getPoolKey(userId); + await Promise.all([ + // Delete from pool + redisClient.del(POOL_KEY), + // ACK + redisClient.xDel(STREAM_NAME, message.id), + ]); + + if (socketRoom) { + // Notify client + sendNotif([socketRoom], MATCHING_EVENT.FAILED); + sendNotif([socketRoom], MATCHING_EVENT.DISCONNECT); + } + + await logQueueStatus(logger, redisClient, `Queue Status after Expiring Request: `); + } +} + +logger.info('Process Healthy'); + +(function loop() { + if (stopSignal) { + return; + } + + Promise.resolve() + .then(async () => await clean()) + .catch((err) => { + if (err !== null) { + const { message, name, cause, stack } = err as Error; + logger.error(JSON.stringify({ message, name, cause, stack })); + } + }) + 
.then(() => process.nextTick(loop)); +})(); diff --git a/backend/matching/src/workers/common.ts b/backend/matching/src/workers/common.ts new file mode 100644 index 0000000000..82757b32cd --- /dev/null +++ b/backend/matching/src/workers/common.ts @@ -0,0 +1,35 @@ +// CHILD PROCESS UTIL LIB + +import { client } from '@/lib/db'; +import { logger } from '@/lib/utils'; +import type { IChildProcessMessage, IMatchEvent } from '@/types'; + +export const sendNotif = (roomIds: Array, event: IMatchEvent, message?: unknown) => { + if (process.send) { + const payload: IChildProcessMessage = { + rooms: roomIds, + event, + message, + }; + process.send(payload); + } +}; + +export const connectClient = async (importedClient: typeof client) => { + let redisClient: typeof client; + + try { + redisClient = + importedClient.isOpen || importedClient.isReady + ? importedClient + : await importedClient.connect(); + } catch (error) { + const { name, message, cause, stack } = error as Error; + logger.error( + `An error occurred in connecting: ${JSON.stringify({ name, message, cause, stack })}` + ); + process.exit(1); + } + + return redisClient; +}; diff --git a/backend/matching/src/workers/index.ts b/backend/matching/src/workers/index.ts new file mode 100644 index 0000000000..0e2b1c9bd1 --- /dev/null +++ b/backend/matching/src/workers/index.ts @@ -0,0 +1,48 @@ +// MAIN PROCESS +import { fork } from 'child_process'; +import path from 'path'; + +import type { Server } from 'socket.io'; + +import { logger } from '@/lib/utils'; +import type { IChildProcessMessage } from '@/types'; +import { MATCHING_EVENT } from '@/ws/events'; + +let nWorkers = 0; // For tracking graceful exit of main process + +export const initWorker = (name: string, io: Server) => { + const lCaseName = name.toLowerCase(); + const worker = fork(path.join(__dirname, `${lCaseName}.js`)); + nWorkers += 1; + const upperCaseName = name.replace(/^[A-Za-z]/, (c) => c.toUpperCase()); + worker.on('message', (message) => { + if (typeof 
message.valueOf() === 'string') { + logger.info({ pid: worker.pid }, `[${upperCaseName}]: ${message}`); + return; + } + + const messagePayload = message.valueOf(); + logger.info( + { pid: worker.pid }, + `[${upperCaseName}]: WS Payload: ${JSON.stringify(messagePayload)}` + ); + const { rooms, event, message: payload } = messagePayload as IChildProcessMessage; + + if (event === MATCHING_EVENT.DISCONNECT) { + io.sockets.in(rooms).socketsLeave(rooms); + return; + } + + io.sockets.in(rooms).emit(event, payload); + }); + worker.on('exit', (code) => { + logger.error({ pid: worker.pid }, `${upperCaseName} exited with code ${code}.`); + nWorkers -= 1; + + if (nWorkers === 0) { + logger.info('Main Process exiting.'); + process.exit(0); + } + }); + return worker; +}; diff --git a/backend/matching/src/workers/matcher.js b/backend/matching/src/workers/matcher.js new file mode 100644 index 0000000000..2f280f7c03 --- /dev/null +++ b/backend/matching/src/workers/matcher.js @@ -0,0 +1,4 @@ +const path = require('path'); + +require('ts-node').register(); +require(path.resolve(__dirname, './matcher.ts')); diff --git a/backend/matching/src/workers/matcher.ts b/backend/matching/src/workers/matcher.ts new file mode 100644 index 0000000000..e77171f707 --- /dev/null +++ b/backend/matching/src/workers/matcher.ts @@ -0,0 +1,230 @@ +import { WORKER_SLEEP_TIME_IN_MILLIS } from '@/config'; +import { client, logQueueStatus } from '@/lib/db'; +import { POOL_INDEX, STREAM_GROUP, STREAM_NAME, STREAM_WORKER } from '@/lib/db/constants'; +import { decodePoolTicket, getPoolKey, getStreamId } from '@/lib/utils'; +import { getMatchItems } from '@/services'; +import { IMatchType } from '@/types'; +import { MATCHING_EVENT } from '@/ws/events'; + +import { connectClient, sendNotif } from './common'; + +const logger = { + info: (message: unknown) => process.send && process.send(message), + error: (message: unknown) => process.send && process.send(message), +}; + +let stopSignal = false; +let timeout: 
ReturnType; + +const cancel = () => { + stopSignal = true; + clearTimeout(timeout); +}; + +const shutdown = () => { + cancel(); + client.disconnect().then(() => { + process.exit(0); + }); +}; + +process.on('SIGINT', shutdown); +process.on('SIGTERM', shutdown); +process.on('exit', shutdown); + +type RequestorParams = { + requestorUserId: string; + requestorStreamId: string; + requestorSocketPort: string; +}; + +async function processMatch( + redisClient: typeof client, + { requestorUserId, requestorStreamId, requestorSocketPort }: RequestorParams, + matches: Awaited>, + searchIdentifier?: IMatchType, + topic?: string, + difficulty?: string +) { + if (matches.total > 0) { + for (const matched of matches.documents) { + const { + userId: matchedUserId, + timestamp, // We use timestamp as the Stream ID + socketPort: matchedSocketPort, + } = decodePoolTicket(matched); + + if (matchedUserId === requestorUserId) { + continue; + } + + // To block cancellation + sendNotif([matchedSocketPort], MATCHING_EVENT.MATCHING); + await redisClient.hSet(getPoolKey(matchedUserId), 'pending', 'false'); + + const matchedStreamId = getStreamId(timestamp); + logger.info(`Found match: ${JSON.stringify(matched)}`); + + await Promise.all([ + // Remove other from pool + redisClient.del([getPoolKey(requestorUserId), getPoolKey(matchedUserId)]), + // Remove other from queue + redisClient.xDel(STREAM_NAME, [requestorStreamId, matchedStreamId]), + ]); + + // Notify both sockets + const matchItems = await getMatchItems( + searchIdentifier, + topic, + difficulty, + requestorUserId, + matchedUserId + ); + logger.info(`Generated Match - ${JSON.stringify(matchItems)}`); + sendNotif([requestorSocketPort, matchedSocketPort], MATCHING_EVENT.SUCCESS, matchItems); + sendNotif([requestorSocketPort, matchedSocketPort], MATCHING_EVENT.DISCONNECT); + + await logQueueStatus(logger, redisClient, `Queue Status After Matching: `); + return true; + } + } + + logger.info(`Found no matches` + (searchIdentifier ? 
` for ${searchIdentifier}` : '')); + return false; +} + +async function match() { + const redisClient = await connectClient(client); + + const stream = await redisClient.xReadGroup( + STREAM_GROUP, + STREAM_WORKER, + { + key: STREAM_NAME, + id: '>', + }, + { + COUNT: 1, + BLOCK: 2000, + } + ); + + if (!stream || stream.length === 0) { + await new Promise((resolve, _reject) => { + timeout = setTimeout(() => resolve('Next Loop'), WORKER_SLEEP_TIME_IN_MILLIS); + }); + return; + } + + for (const group of stream) { + // Perform matching + for (const matchRequest of group.messages) { + logger.info(`Received request: ${JSON.stringify(matchRequest)}`); + // Query the pool + const { + id: requestorStreamId, + userId: requestorUserId, + socketPort: requestorSocketPort, + difficulty, + topic, + } = decodePoolTicket(matchRequest); + + // To Block Cancellation + sendNotif([requestorSocketPort], MATCHING_EVENT.MATCHING); + await redisClient.hSet(getPoolKey(requestorUserId), 'pending', 'false'); + + let clause = [`-@userId:(${requestorUserId})`, '@pending:(true)']; + + if (difficulty) { + clause.push(`@difficulty:{${difficulty}}`); + } + + if (topic) { + clause.push(`@topic:{${topic}}`); + } + + // Push UserID clause to the back. 
+ clause = clause.reverse(); + + const searchParams = { + LIMIT: { from: 0, size: 1 }, + SORTBY: { BY: 'timestamp', DIRECTION: 'ASC' }, + } as const; + const requestorParams = { requestorUserId, requestorStreamId, requestorSocketPort }; + + const exactMatches = await redisClient.ft.search(POOL_INDEX, clause.join(' '), searchParams); + const exactMatchFound = await processMatch( + redisClient, + requestorParams, + exactMatches, + 'exact match', + topic, + difficulty + ); + + if (exactMatchFound || !topic || !difficulty) { + // Match found, or Partial search completed + continue; + } + + // Match on Topic + const topicMatches = await redisClient.ft.search( + POOL_INDEX, + clause.filter((v) => !v.startsWith('@difficulty')).join(' '), + searchParams + ); + const topicMatchFound = await processMatch( + redisClient, + requestorParams, + topicMatches, + 'topic', + topic, + difficulty + ); + + if (topicMatchFound) { + continue; + } + + // Match on Difficulty + const difficultyMatches = await redisClient.ft.search( + POOL_INDEX, + clause.filter((v) => !v.startsWith('@topic')).join(' '), + searchParams + ); + const hasDifficultyMatch = await processMatch( + redisClient, + requestorParams, + difficultyMatches, + 'difficulty', + topic, + difficulty + ); + + if (!hasDifficultyMatch) { + // To allow cancellation + await redisClient.hSet(getPoolKey(requestorUserId), 'pending', 'true'); + sendNotif([requestorSocketPort], MATCHING_EVENT.PENDING); + logger.info(`${requestorUserId} is now in mode ${MATCHING_EVENT.PENDING}`); + } + } + } +} + +logger.info('Process Healthy'); + +(function loop() { + if (stopSignal) { + return; + } + + Promise.resolve() + .then(async () => await match()) + .catch((error) => { + if (error !== null) { + const { message, name, cause } = error as Error; + logger.error(JSON.stringify({ message, name, cause })); + } + }) + .then(() => process.nextTick(loop)); +})(); diff --git a/backend/matching/src/ws/events.ts b/backend/matching/src/ws/events.ts new file 
mode 100644 index 0000000000..a5d63ff38f --- /dev/null +++ b/backend/matching/src/ws/events.ts @@ -0,0 +1,17 @@ +export const WS_EVENT = { + JOIN_ROOM: 'joinRoom', + CANCEL_ROOM: 'cancelRoom', + LEAVE_ROOM: 'leave', + START_QUEUING: 'startQueuing', + DISCONNECT: 'disconnect', +} as const; + +export const MATCHING_EVENT = { + ERROR: 'ERROR', // When match encounters error + QUEUED: 'QUEUED', // When match joins pool + MATCHING: 'MATCHING', // When matching in progress + PENDING: 'PENDING', // When waiting for match + SUCCESS: 'SUCCESS', // When match successful + FAILED: 'FAILED', // When match failed + DISCONNECT: 'DISCONNECT', // To disconnect all sockets in room +} as const; diff --git a/backend/matching/src/ws/handlers.ts b/backend/matching/src/ws/handlers.ts new file mode 100644 index 0000000000..9e4f7138fd --- /dev/null +++ b/backend/matching/src/ws/handlers.ts @@ -0,0 +1,92 @@ +import type { DefaultEventsMap, Server, Socket } from 'socket.io'; + +import { client, logQueueStatus } from '@/lib/db'; +import { logger } from '@/lib/utils'; +import { queueingService } from '@/services'; +import type { IRedisClient, IRequestMatchEvent } from '@/types'; + +import { MATCHING_EVENT, WS_EVENT } from './events'; + +type ISocketIOServer = Server; +type ISocketIOSocket = Socket; + +export const joinRoomHandler = + (socket: ISocketIOSocket) => + (roomId?: string) => { + if (!roomId) { + logger.warn('joinRoom event received without a roomId'); + return; + } + + socket.join(roomId); + logger.info(`Socket ${socket.id} joined room: ${roomId}`); + socket.emit('joinedRoom', roomId); + }; + +export const cancelRoomHandler = + (io: ISocketIOServer, socket: ISocketIOSocket) => + (roomId?: string) => { + if (roomId) { + io.in(roomId).socketsLeave(roomId); + logger.info(`Room ${roomId} has been cancelled and closed.`); + socket.emit('roomCancelled', roomId); + } else { + logger.warn('No room ID provided for cancellation'); + } + }; + +let redisClient: IRedisClient; + +export const 
queueEventHandler = + (socket: ISocketIOSocket) => + async (payload: Partial) => { + // 1. Invalid Room + if (!payload.roomId) { + const errorMessage = 'Queuing Event triggered without room.'; + logger.warn(errorMessage); + socket.emit(MATCHING_EVENT.ERROR, errorMessage); + return; + } + + // 2. Invalid Request + const { roomId } = payload; + + if ( + !payload.userId || + (!payload.topic && !payload.difficulty) || + (payload.topic && !Array.isArray(payload.topic)) + ) { + const message = `Payload for ${WS_EVENT.START_QUEUING} is invalid.`; + logger.warn(message); + socket.emit(MATCHING_EVENT.ERROR, message); + return; + } + + // 3. Start Queuing + try { + if (!redisClient || !redisClient.isOpen || !redisClient.isReady) { + redisClient = await client.connect(); + } + + const { userId, difficulty, topic } = payload; + const timestamp = `${Date.now()}`; + await queueingService(redisClient, { + userId, + difficulty, + topic, + socketPort: roomId, + timestamp, + }); + socket.emit(MATCHING_EVENT.QUEUED); + await logQueueStatus( + logger, + redisClient, + `[ws::queueEventHandler] Queue Status Before Matching: ` + ); + } catch (error) { + const { name, message, stack, cause } = error as Error; + logger.error({ name, message, stack, cause }, `An error occurred.`); + socket.emit(MATCHING_EVENT.ERROR, 'Error connecting to client'); + return; + } + }; diff --git a/backend/matching/src/ws/index.ts b/backend/matching/src/ws/index.ts new file mode 100644 index 0000000000..aad1ca831e --- /dev/null +++ b/backend/matching/src/ws/index.ts @@ -0,0 +1 @@ +export * from './main'; diff --git a/backend/matching/src/ws/main.ts b/backend/matching/src/ws/main.ts new file mode 100644 index 0000000000..518ad7df55 --- /dev/null +++ b/backend/matching/src/ws/main.ts @@ -0,0 +1,36 @@ +import { createServer } from 'http'; + +import { Server } from 'socket.io'; + +import { UI_HOST } from '@/config'; +import { logger } from '@/lib/utils'; + +import { WS_EVENT } from './events'; +import { 
cancelRoomHandler, joinRoomHandler, queueEventHandler } from './handlers'; + +export const createWs = (server: ReturnType) => { + const io = new Server(server, { + cors: { + origin: [UI_HOST], + credentials: true, + }, + path: '/matching-socket', + }); + io.on('connection', (socket) => { + logger.info(`Socket ${socket.id} connected`); + + socket.on(WS_EVENT.JOIN_ROOM, joinRoomHandler(socket)); + socket.on(WS_EVENT.CANCEL_ROOM, cancelRoomHandler(io, socket)); + socket.on(WS_EVENT.LEAVE_ROOM, (room?: string) => { + if (room) { + socket.leave(room); + } + }); + socket.on(WS_EVENT.START_QUEUING, queueEventHandler(socket)); + socket.on(WS_EVENT.DISCONNECT, () => { + logger.info(`Client disconnected: ${socket.id}`); + socket.disconnect(); + }); + }); + return io; +}; diff --git a/backend/matching/tsconfig.json b/backend/matching/tsconfig.json new file mode 100644 index 0000000000..c18b7e3c96 --- /dev/null +++ b/backend/matching/tsconfig.json @@ -0,0 +1,110 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + /* Language and Environment */ + "target": "ES2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. 
*/ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + /* Modules */ + "module": "commonjs", /* Specify what module code is generated. */ + "rootDir": "./src", /* Specify the root folder within your source files. */ + // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ + "baseUrl": ".", /* Specify the base directory to resolve non-relative module names. */ + "paths": { + "@/*": ["./src/*"] + }, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. 
*/ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. 
*/ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./dist", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. 
*/ + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. 
*/ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ + }, + "exclude": [ + "drizzle.*.*ts" + ], + "ts-node": { + "swc": true, + "require": ["tsconfig-paths/register"] + } +} \ No newline at end of file diff --git a/backend/question/.dockerignore b/backend/question/.dockerignore new file mode 100644 index 0000000000..d26c7464b6 --- /dev/null +++ b/backend/question/.dockerignore @@ -0,0 +1,2 @@ +node_modules +dist/ \ No newline at end of file diff --git a/backend/question/.env.compose b/backend/question/.env.compose new file mode 100644 index 0000000000..706bc12eaf --- /dev/null +++ b/backend/question/.env.compose @@ -0,0 +1,10 @@ +# To be injected by Docker Compose +# PEERPREP_UI_HOST="http://frontend:3000" + +EXPRESS_PORT=9002 +EXPRESS_DB_HOST="question-db" +EXPRESS_DB_PORT=5432 +POSTGRES_DB="question" +POSTGRES_USER="peerprep-qn-express" +POSTGRES_PASSWORD="Xk8qEcEI2sizjfEn/lF6mLqiyBECjIHY3q6sdXf9poQ=" +PGDATA="/data/qn-db" diff --git a/backend/question/.env.docker b/backend/question/.env.docker new file mode 100644 index 0000000000..959eb41a4c --- /dev/null +++ b/backend/question/.env.docker @@ -0,0 +1,9 @@ +PEERPREP_UI_HOST=http://host.docker.internal:5173 + +EXPRESS_PORT=9002 +EXPRESS_DB_HOST=host.docker.internal +EXPRESS_DB_PORT=5433 +POSTGRES_DB=question +POSTGRES_USER=peerprep-qn-express +POSTGRES_PASSWORD=Xk8qEcEI2sizjfEn/lF6mLqiyBECjIHY3q6sdXf9poQ= +PGDATA=/data/qn-db diff --git a/backend/question/.env.local b/backend/question/.env.local new file mode 100644 index 0000000000..67cf22af19 --- /dev/null +++ b/backend/question/.env.local @@ -0,0 +1,9 @@ +PEERPREP_UI_HOST="http://localhost:5173" + +EXPRESS_PORT=9002 +EXPRESS_DB_HOST="localhost" +EXPRESS_DB_PORT=5433 +POSTGRES_DB="question" +POSTGRES_USER="peerprep-qn-express" +POSTGRES_PASSWORD="Xk8qEcEI2sizjfEn/lF6mLqiyBECjIHY3q6sdXf9poQ=" +PGDATA="/data/qn-db" diff --git a/backend/question/README.md b/backend/question/README.md new file mode 100644 index 0000000000..db1ada2f7e --- /dev/null +++ b/backend/question/README.md @@ -0,0 +1,174 @@ +# Template 
Service + +This directory contains the code for the Questions +Service. + + +# Questions Service + +## Running with Docker (Standalone) + +1. Run this command to build: + ```sh + docker build \ + -t question-express-local \ + --build-arg port=9002 \ + -f express.Dockerfile . + ``` +2. Run this command, from the root folder: + ```sh + make db-up + ``` + +3. Run the necessary migrate and seed commands, if you haven't yet. + +4. Run this command to expose the container: + ```sh + docker run -p 9002:9002 --env-file ./.env.docker question-express-local + ``` + +## Running with Docker-Compose (Main config) + +Edit the variables in the `.env.compose` file and run `make up` from the root folder. + +Any startup instructions will be run from `entrypoint.sh` instead. + + + + + +## Database + +We use: + +- PostgreSQL 16 for the database. To run it, we use: + - Docker to run the database, as well as inject any user-defined + configurations or SQL files into the Docker image. + - Docker-Compose to run the database, as well as any other + services this API microservice may depend on. +- [**Drizzle**](https://orm.drizzle.team/) for the ORM. + +Follow the instructions below for the setup, as well as to learn how to work with the database. + +### Setup + +1. Install Docker Desktop on your device. Launch it. + +2. To verify that it is launched and installed correctly, run the + following in your terminal: + + ```bash + docker --version + ``` + + If the command does not error, and outputs a version, proceed to + the next step. + +3. Inspect the `docker-compose.yml` file. It + should look like this: + + ```yml + services: + # ... + postgres: + # ... + volumes: + - "template-db-docker:/data/template-db" + # - ./init.sql:/docker-entrypoint-initdb.d/init.sql + ports: + - "5431:5432" + restart: unless-stopped + + volumes: + template-db-docker: + external: true + ``` + + We observe that this Database relies on a + Docker Volume.
Replace all instances of + `template-db-docker` with your desired + volume name. + +4. Then, create the Docker Volume with + the following command: + + ```bash + # in this case, the command is + # docker volume create template-db-docker + docker volume create <your-volume-name> + ``` +5. Finally, create the Database Container: + + ```bash + docker-compose up -d + ``` + +6. To bring it down, run this command: + + ```bash + docker-compose down + ``` + +### Schema + +We maintain the schema in the `src/lib/db/schema.ts` file. + +Refer to the Drizzle documentation to learn how +to properly define schemas. Then, insert your +schemas into the file. + +### Migration + +After you have created/updated your schemas in +the file, persist them to the Database with +Migrations. + +1. Configure your credentials (port, + password, ...) in: + + - `drizzle.config.ts` + - `drizzle.migrate.mts`. + - `src/lib/db/index.ts`. + + In the future, we may wish to migrate these + credentials to environment variables. + +2. Run the `npm run db:generate` command to +generate your `.sql` Migration Files under the +`drizzle` folder. + +3. Rename your + `<number>_<random_name>.sql` file + to `<number>_<migration_name>.sql`. + + For example: + - Generated: `0000_dazzling_squirrel.sql` + - Renamed: `0000_initial_schema.sql`. + + Then, rename the + `meta/_journal.json` tag from + `0000_dazzling_squirrel` to + `0000_initial_schema` as well. Replace the + migration number and name with the one you + used. + +4. Finally, run the migration with this: + + ```bash + npm run db:migrate + ``` + +### Connecting with the DB + +1. Import the `db` instance from `lib/db`. +2. Use the Drizzle APIs and the tables defined in + `src/lib/db/schema.ts` to interact with the + tables. + + ```ts + import { db, tableName } from '../lib/db'; + + const route = async (req, res) => { + await db.select().from(tableName); //...
+ } + ``` diff --git a/backend/question/drizzle.config.ts b/backend/question/drizzle.config.ts new file mode 100644 index 0000000000..b95650e9d9 --- /dev/null +++ b/backend/question/drizzle.config.ts @@ -0,0 +1,16 @@ +import { defineConfig } from 'drizzle-kit'; + +const config = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB!, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; + +export default defineConfig({ + schema: './src/lib/db/schema.ts', + out: './drizzle', + dialect: 'postgresql', + dbCredentials: config, +}); diff --git a/backend/question/drizzle/0000_initial_schema.sql b/backend/question/drizzle/0000_initial_schema.sql new file mode 100644 index 0000000000..071be9e4f0 --- /dev/null +++ b/backend/question/drizzle/0000_initial_schema.sql @@ -0,0 +1,21 @@ +DO $$ BEGIN + CREATE TYPE "public"."action" AS ENUM('SEED'); +EXCEPTION + WHEN duplicate_object THEN null; +END $$; +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS "admin" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "created_at" timestamp DEFAULT now(), + "action" "action" NOT NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS "questions" ( + "id" serial PRIMARY KEY NOT NULL, + "title" varchar(255) NOT NULL, + "difficulty" varchar(50) NOT NULL, + "topic" varchar(255)[] NOT NULL, + "description" text NOT NULL, + "created_at" timestamp (6) with time zone DEFAULT now(), + "updated_at" timestamp (6) with time zone DEFAULT now() +); diff --git a/backend/question/drizzle/0001_attempt_history.sql b/backend/question/drizzle/0001_attempt_history.sql new file mode 100644 index 0000000000..ec5e9ec641 --- /dev/null +++ b/backend/question/drizzle/0001_attempt_history.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS "question_attempts" ( + "attempt_id" serial PRIMARY KEY NOT NULL, + "question_id" integer NOT NULL, + "user_id_1" uuid NOT NULL, + "user_id_2" uuid, + 
"code" text NOT NULL, + "timestamp" timestamp (6) with time zone DEFAULT now(), + "language" varchar(50) NOT NULL +); +--> statement-breakpoint +CREATE UNIQUE INDEX IF NOT EXISTS "unique_users_attempt" ON "question_attempts" USING btree ("question_id","user_id_1","user_id_2"); \ No newline at end of file diff --git a/backend/question/drizzle/meta/0000_snapshot.json b/backend/question/drizzle/meta/0000_snapshot.json new file mode 100644 index 0000000000..6fa977642d --- /dev/null +++ b/backend/question/drizzle/meta/0000_snapshot.json @@ -0,0 +1,109 @@ +{ + "id": "84b2ca8d-3021-496f-8769-bbc4dada6468", + "prevId": "00000000-0000-0000-0000-000000000000", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.admin": { + "name": "admin", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "default": "now()" + }, + "action": { + "name": "action", + "type": "action", + "typeSchema": "public", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "public.questions": { + "name": "questions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "serial", + "primaryKey": true, + "notNull": true + }, + "title": { + "name": "title", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "difficulty": { + "name": "difficulty", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "topic": { + "name": "topic", + "type": "varchar(255)[]", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp (6) with time zone", + "primaryKey": false, + "notNull": 
false, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp (6) with time zone", + "primaryKey": false, + "notNull": false, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + } + }, + "enums": { + "public.action": { + "name": "action", + "schema": "public", + "values": [ + "SEED" + ] + } + }, + "schemas": {}, + "sequences": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/backend/question/drizzle/meta/0001_snapshot.json b/backend/question/drizzle/meta/0001_snapshot.json new file mode 100644 index 0000000000..18e41ae448 --- /dev/null +++ b/backend/question/drizzle/meta/0001_snapshot.json @@ -0,0 +1,190 @@ +{ + "id": "afa9ccaa-137c-47d3-acb0-ab1e2208038e", + "prevId": "84b2ca8d-3021-496f-8769-bbc4dada6468", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.admin": { + "name": "admin", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "default": "now()" + }, + "action": { + "name": "action", + "type": "action", + "typeSchema": "public", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "public.question_attempts": { + "name": "question_attempts", + "schema": "", + "columns": { + "attempt_id": { + "name": "attempt_id", + "type": "serial", + "primaryKey": true, + "notNull": true + }, + "question_id": { + "name": "question_id", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "user_id_1": { + "name": "user_id_1", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "user_id_2": { + "name": "user_id_2", + "type": "uuid", + "primaryKey": 
false, + "notNull": false + }, + "code": { + "name": "code", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "timestamp": { + "name": "timestamp", + "type": "timestamp (6) with time zone", + "primaryKey": false, + "notNull": false, + "default": "now()" + }, + "language": { + "name": "language", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + } + }, + "indexes": { + "unique_users_attempt": { + "name": "unique_users_attempt", + "columns": [ + { + "expression": "question_id", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "user_id_1", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "user_id_2", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": true, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "public.questions": { + "name": "questions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "serial", + "primaryKey": true, + "notNull": true + }, + "title": { + "name": "title", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "difficulty": { + "name": "difficulty", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "topic": { + "name": "topic", + "type": "varchar(255)[]", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "created_at": { + "name": "created_at", + "type": "timestamp (6) with time zone", + "primaryKey": false, + "notNull": false, + "default": "now()" + }, + "updated_at": { + "name": "updated_at", + "type": "timestamp (6) with time zone", + "primaryKey": false, + "notNull": false, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + } + }, + "enums": { + "public.action": { + 
"name": "action", + "schema": "public", + "values": [ + "SEED" + ] + } + }, + "schemas": {}, + "sequences": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/backend/question/drizzle/meta/_journal.json b/backend/question/drizzle/meta/_journal.json new file mode 100644 index 0000000000..d9fe8b9aa5 --- /dev/null +++ b/backend/question/drizzle/meta/_journal.json @@ -0,0 +1,20 @@ +{ + "version": "7", + "dialect": "postgresql", + "entries": [ + { + "idx": 0, + "version": "7", + "when": 1728143550719, + "tag": "0000_initial_schema", + "breakpoints": true + }, + { + "idx": 1, + "version": "7", + "when": 1730553826248, + "tag": "0001_attempt_history", + "breakpoints": true + } + ] +} \ No newline at end of file diff --git a/backend/question/entrypoint.sh b/backend/question/entrypoint.sh new file mode 100644 index 0000000000..61c411f483 --- /dev/null +++ b/backend/question/entrypoint.sh @@ -0,0 +1,13 @@ +#!/bin/sh + +# Drizzle will handle its own logic to remove conflicts +npm run db:prod:migrate + +# Checks admin table and will not seed if data exists +npm run db:prod:seed + +rm -rf drizzle src tsconfig.json + +npm uninstall tsx drizzle-kit + +npm run start \ No newline at end of file diff --git a/backend/question/express.Dockerfile b/backend/question/express.Dockerfile new file mode 100644 index 0000000000..452567bc05 --- /dev/null +++ b/backend/question/express.Dockerfile @@ -0,0 +1,26 @@ +FROM node:lts-alpine AS build +WORKDIR /data/question-express +COPY package*.json ./ +RUN npm install +ARG env +COPY . . +RUN npm run build + +FROM node:lts-alpine AS production +WORKDIR /data/question-express +COPY --from=build /data/question-express/package*.json ./ +COPY --from=build --chown=node:node /data/question-express/dist ./dist + +RUN npm ci --omit=dev + +# For migration +RUN npm install tsx drizzle-kit +COPY drizzle ./drizzle +COPY src/lib/db/ ./src/lib/db +COPY src/config.ts ./src +COPY tsconfig.json . 
+COPY entrypoint.sh . + +ARG port +EXPOSE ${port} +ENTRYPOINT [ "/bin/sh", "entrypoint.sh" ] \ No newline at end of file diff --git a/backend/question/package.json b/backend/question/package.json new file mode 100644 index 0000000000..7bccc133b9 --- /dev/null +++ b/backend/question/package.json @@ -0,0 +1,51 @@ +{ + "name": "question", + "version": "1.0.0", + "main": "dist/index.js", + "scripts": { + "dev": "env-cmd -f .env.local nodemon src/index.ts | pino-pretty", + "build": "tsc && tsc-alias", + "start": "node dist/index.js", + "build:local": "env-cmd -f .env.local tsc && tsc-alias", + "start:local": "env-cmd -f .env.local node dist/index.js", + "db:generate": "env-cmd -f .env.local drizzle-kit generate", + "db:migrate": "env-cmd -f .env.local tsx ./src/lib/db/migrate.ts", + "db:seed": "env-cmd -f .env.local tsx ./src/lib/db/seed.ts", + "db:prod:migrate": "tsx ./src/lib/db/migrate.ts", + "db:prod:seed": "tsx ./src/lib/db/seed.ts", + "db:inspect": "env-cmd -f .env.local drizzle-kit studio", + "fmt": "prettier --config .prettierrc src --write", + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "description": "", + "dependencies": { + "cors": "^2.8.5", + "dotenv": "^16.4.5", + "drizzle-orm": "^0.33.0", + "env-cmd": "^10.1.0", + "express": "^4.21.0", + "helmet": "^8.0.0", + "http-status-codes": "^2.3.0", + "pino": "^9.4.0", + "pino-http": "^10.3.0", + "postgres": "^3.4.4", + "uuid": "^11.0.2" + }, + "devDependencies": { + "@swc/core": "^1.7.26", + "@swc/helpers": "^0.5.13", + "@types/cors": "^2.8.17", + "@types/express": "^4.17.21", + "@types/node": "^22.5.5", + "drizzle-kit": "^0.24.2", + "nodemon": "^3.1.4", + "pino-pretty": "^11.2.2", + "ts-node": "^10.9.2", + "tsc-alias": "^1.8.10", + "tsconfig-paths": "^4.2.0", + "tsx": "^4.19.1" + } +} diff --git a/backend/question/src/config.ts b/backend/question/src/config.ts new file mode 100644 index 0000000000..1004f29e4c --- /dev/null +++ 
b/backend/question/src/config.ts @@ -0,0 +1,15 @@ +import 'dotenv/config'; + +export const UI_HOST = process.env.PEERPREP_UI_HOST!; + +export const EXPRESS_PORT = process.env.EXPRESS_PORT; + +export const dbConfig = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB!, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; + +export const LOAD_TEST_POD = process.env.LOAD_TEST_POD || 'http://question-service-load-test'; diff --git a/backend/question/src/controller/attempted-controller.ts b/backend/question/src/controller/attempted-controller.ts new file mode 100644 index 0000000000..dfd685e718 --- /dev/null +++ b/backend/question/src/controller/attempted-controller.ts @@ -0,0 +1,77 @@ +import { Request, Response } from 'express'; +import { StatusCodes } from 'http-status-codes'; + +import { logger } from '@/lib/utils'; +import { isValidUUID } from '@/lib/uuid'; +import { getQuestionAttempts } from '@/services/get/get-attempts'; +import { addAttempt } from '@/services/post/addAttempt'; + +// Define the expected request body structure +interface AddAttemptRequestBody { + questionId: number; + userId1: string; + userId2?: string; // Optional if userId2 is not always required + code: string; + language: string; +} + +// Controller function to handle creating an attempt +export const createAttempt = async ( + req: Request<unknown, unknown, AddAttemptRequestBody>, + res: Response +) => { + const { questionId, userId1, userId2, code, language } = req.body; + + // Basic validation for required fields + if (!questionId || !userId1 || !code || !language) { + return res.status(400).json({ error: 'Missing required fields' }); + } + + try { + // Call the service function to add the attempt + const result = await addAttempt({ + questionId, + userId1, + userId2, + code, + language, + }); + + // Respond with success + res.status(StatusCodes.OK).json({ message: 'Attempt added successfully', result }); + } catch (err) {
+ const { name, message, stack, cause } = err as Error; + logger.error({ name, message, stack, cause }, 'Error adding attempt'); + + // Enhanced error response with error details + res.status(500).json({ + error: 'Error adding attempt', + details: err instanceof Error ? err.message : 'Unknown error', + }); + } +}; + +export const getAttempts = async ( + req: Request<unknown, unknown, Partial<Parameters<typeof getQuestionAttempts>[0]>, unknown>, + res: Response +) => { + const { questionId, userId, limit, offset } = req.body; + + if (!questionId || isNaN(questionId) || !userId || !isValidUUID(userId)) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json('Malformed Request'); + } + + try { + const result = await getQuestionAttempts({ questionId, userId, limit, offset }); + return res.status(StatusCodes.OK).json(result); + } catch (err) { + const { name, message, stack, cause } = err as Error; + logger.error({ name, message, stack, cause }, 'Error retrieving attempts'); + + // Enhanced error response with error details + res.status(StatusCodes.INTERNAL_SERVER_ERROR).json({ + error: 'Error retrieving attempts', + details: err instanceof Error ?
err.message : 'Unknown error', + }); + } +}; diff --git a/backend/question/src/controller/question-controller.ts b/backend/question/src/controller/question-controller.ts new file mode 100644 index 0000000000..6e59ad4d27 --- /dev/null +++ b/backend/question/src/controller/question-controller.ts @@ -0,0 +1,185 @@ +import type { Request, Response } from 'express'; +import { StatusCodes } from 'http-status-codes'; + +import { + getDifficultiesService, + getQuestionDetailsService, + getQuestionsService, + getTopicsService, + searchQuestionsByTitleService, +} from '@/services/get/index'; +import type { IGetQuestionPayload, IGetQuestionsPayload } from '@/services/get/types'; +import { + createQuestionService, + deleteQuestionService, + updateQuestionService, +} from '@/services/post'; +import type { + ICreateQuestionPayload, + IDeleteQuestionPayload, + IUpdateQuestionPayload, +} from '@/services/post/types'; + +export const getQuestions = async (req: Request, res: Response): Promise => { + const { questionName, difficulty, topic, pageNum, recordsPerPage, userId } = req.query; + const payload: IGetQuestionsPayload = { + questionName: questionName as string, + difficulty: difficulty as string, + topic: topic as Array, + pageNum: parseInt(pageNum as string) || 0, + recordsPerPage: parseInt(recordsPerPage as string) || 20, + userId: userId as string, + }; + + try { + const result = await getQuestionsService(payload); + + if (!result.data || result.code >= 400) { + return res.status(result.code).json({ + message: result.error?.message ?? 
'An error occurred', + }); + } + + return res.status(result.code).json(result.data); + } catch (error) { + return res + .status(StatusCodes.INTERNAL_SERVER_ERROR) + .json({ success: false, message: 'An error occurred', error }); + } +}; + +export const getQuestionDetails = async (req: Request, res: Response): Promise => { + const payload: IGetQuestionPayload = { + questionId: parseInt(req.params.questionId), + }; + + try { + const result = await getQuestionDetailsService(payload); + + if (!result.data || result.code >= 400) { + return res.status(result.code).json({ + message: result.error?.message ?? 'An error occurred', + }); + } + + return res.status(result.code).json(result.data); + } catch (error) { + return res + .status(StatusCodes.INTERNAL_SERVER_ERROR) + .json({ success: false, message: 'An error occurred', error }); + } +}; + +export const searchQuestionsByTitle = async (req: Request, res: Response): Promise => { + const { title } = req.query; + const page = parseInt(req.query.page as string) || 1; + const limit = parseInt(req.query.limit as string) || 10; + + if (!title) { + return res + .status(StatusCodes.UNPROCESSABLE_ENTITY) + .json({ success: false, message: 'Title is required' }); + } + + try { + const result = await searchQuestionsByTitleService(title.toString(), page, limit); + return res.status(result.code).json(result); + } catch (error) { + return res + .status(StatusCodes.INTERNAL_SERVER_ERROR) + .json({ success: false, message: 'An error occurred', error }); + } +}; + +export const createQuestion = async (req: Request, res: Response): Promise => { + const { title, description, difficulty, topics } = req.body; + + if (!title || !description || !difficulty) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json('Malformed'); + } + + const payload: ICreateQuestionPayload = { + title, + description, + difficulty, + topics, + }; + + try { + const result = await createQuestionService(payload); + + if (!result.data || result.code >= 400) { + 
return res.status(result.code).json({ + message: result.message ?? 'An error occurred', + }); + } + + return res.status(result.code).json(result.data); + } catch (error) { + return res + .status(StatusCodes.INTERNAL_SERVER_ERROR) + .json({ success: false, message: 'An error occurred', error }); + } +}; + +export const updateQuestion = async (req: Request, res: Response): Promise => { + const { title, description, difficulty, topics } = req.body; + + if (!title && !description && !difficulty && (!topics || !Array.isArray(topics))) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json('Malformed'); + } + + const payload: IUpdateQuestionPayload = { + id: parseInt(req.params.questionId), + title, + description, + difficulty, + topics, + }; + + try { + const result = await updateQuestionService(payload); + return res.status(result.code).json(result); + } catch (error) { + return res + .status(StatusCodes.INTERNAL_SERVER_ERROR) + .json({ success: false, message: 'An error occurred', error }); + } +}; + +export const deleteQuestion = async (req: Request, res: Response): Promise => { + const payload: IDeleteQuestionPayload = { + id: parseInt(req.params.questionId), + }; + + try { + const result = await deleteQuestionService(payload); + return res.status(result.code).json(result.success ? 
'Ok' : result.message); + } catch (error) { + return res + .status(StatusCodes.INTERNAL_SERVER_ERROR) + .json({ success: false, message: 'An error occurred', error }); + } +}; + +export const getTopics = async (req: Request, res: Response): Promise<Response> => { + try { + const result = await getTopicsService(); + return res.status(result.code).json(result.data); + } catch (error) { + return res + .status(StatusCodes.INTERNAL_SERVER_ERROR) + .json({ success: false, message: 'An error occurred', error }); + } +}; + +export const getDifficulties = async (req: Request, res: Response): Promise<Response> => { + try { + const result = await getDifficultiesService(); + return res.status(result.code).json(result.data); + } catch (error) { + return res + .status(StatusCodes.INTERNAL_SERVER_ERROR) + .json({ success: false, message: 'An error occurred', error }); + } +}; diff --git a/backend/question/src/controller/unattempted-controller.ts b/backend/question/src/controller/unattempted-controller.ts new file mode 100644 index 0000000000..47743ef9e8 --- /dev/null +++ b/backend/question/src/controller/unattempted-controller.ts @@ -0,0 +1,60 @@ +// src/controllers/questionsController.ts +// src/controllers/unattempted-controller.ts +import { Request, Response } from 'express'; + +import { isValidUUID } from '@/lib/uuid'; + +import { getRandomQuestion } from '../services/get/get-random-question'; + +// Define types for query parameters +interface UnattemptedQuestionQuery { + userId1: string; + userId2: string; + topics?: string | Array<string>; + difficulty?: string; +} + +export const fetchRandomQuestionByIncreasingAttemptCount = async ( + req: Request<unknown, unknown, Partial<UnattemptedQuestionQuery>, unknown>, + res: Response +) => { + const { userId1, userId2, topics: payloadTopics, difficulty } = req.body; + + if (userId1 === undefined || !isValidUUID(userId1)) { + return res.status(400).json({ error: 'Invalid or missing userId1. It must be a valid id.'
}); + } + + if (!userId2 || !isValidUUID(userId2)) { + return res.status(400).json({ error: 'Invalid userId2. It must be a valid id if provided.' }); + } + + // Ensure topics is an array of strings + const topics = + typeof payloadTopics === 'string' + ? payloadTopics.split(',') + : Array.isArray(payloadTopics) + ? payloadTopics.filter((topic) => !!topic) + : undefined; + + try { + const question = await getRandomQuestion({ + userId1, + userId2, + topics, + difficulty, + }); + + if (question) { + res.json(question); + return; + } + + res.status(404).json({ message: 'No unattempted questions found' }); + return; + } catch (error) { + console.error('Error fetching unattempted question:', error); // Log the actual error + res + .status(500) + .json({ error: 'Error fetching unattempted question', details: (error as any).message }); + } +}; diff --git a/backend/question/src/index.ts b/backend/question/src/index.ts new file mode 100644 index 0000000000..6f21150c99 --- /dev/null +++ b/backend/question/src/index.ts @@ -0,0 +1,11 @@ +import { EXPRESS_PORT } from '@/config'; +import { logger } from '@/lib/utils'; +import app, { dbHealthCheck } from '@/server'; + +const port = Number.parseInt(EXPRESS_PORT ?? 
'8001'); + +const listenMessage = `App listening on port: ${port}`; +app.listen(port, () => { + void dbHealthCheck(); + logger.info(listenMessage); +}); diff --git a/backend/question/src/lib/db/index.ts b/backend/question/src/lib/db/index.ts new file mode 100644 index 0000000000..2fbbec3b0d --- /dev/null +++ b/backend/question/src/lib/db/index.ts @@ -0,0 +1,16 @@ +import { drizzle } from 'drizzle-orm/postgres-js'; +import postgres from 'postgres'; + +export const config = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; + +const queryClient = postgres(config); + +export const db = drizzle(queryClient); + +export * from './schema'; diff --git a/backend/question/src/lib/db/migrate.ts b/backend/question/src/lib/db/migrate.ts new file mode 100644 index 0000000000..a012ab160a --- /dev/null +++ b/backend/question/src/lib/db/migrate.ts @@ -0,0 +1,21 @@ +import { drizzle } from 'drizzle-orm/postgres-js'; +import { migrate } from 'drizzle-orm/postgres-js/migrator'; +import postgres from 'postgres'; + +const config = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; +const migrationConnection = postgres({ ...config, max: 1 }); + +const db = drizzle(migrationConnection); + +const main = async () => { + await migrate(db, { migrationsFolder: 'drizzle' }); + await migrationConnection.end(); +}; + +void main(); diff --git a/backend/question/src/lib/db/sample-data/questions.ts b/backend/question/src/lib/db/sample-data/questions.ts new file mode 100644 index 0000000000..8816221270 --- /dev/null +++ b/backend/question/src/lib/db/sample-data/questions.ts @@ -0,0 +1,192 @@ +export const questionDetails = [ + { + id: 1, + title: 'Reverse a String', + description: 
+ 'Write a function that reverses a string. The input string is given as an array of characters `s`. You must do this by modifying the input array in-place with O(1) extra memory.\n\n**Example 1:**\n\nInput: `s = ["h","e","l","l","o"]`\n\nOutput: `["o","l","l","e","h"]`\n\n**Example 2:**\n\nInput: `s = ["H","a","n","n","a","h"]`\n\nOutput: `["h","a","n","n","a","H"]`\n\n**Constraints:**\n\n* `1 <= s.length <= 105`\n\n* `s[i]` is a printable ASCII character.', + topics: ['Strings', 'Algorithms'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/reverse-string/', + }, + { + id: 2, + title: 'Linked List Cycle Detection', + description: 'Implement a function to detect if a linked list contains a cycle.', + topics: ['Data Structures', 'Algorithms'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/linked-list-cycle/', + }, + { + id: 3, + title: 'Roman to Integer', + description: 'Given a Roman numeral, convert it to an integer.', + topics: ['Algorithms'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/roman-to-integer/', + }, + { + id: 4, + title: 'Add Binary', + description: 'Given two binary strings `a` and `b`, return their sum as a binary string.', + topics: ['Bit Manipulation', 'Algorithms'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/add-binary/', + }, + { + id: 5, + title: 'Fibonacci Number', + description: + 'The Fibonacci numbers, commonly denoted `F(n)`, form a sequence such that each number is the sum of the two preceding ones, starting from 0 and 1. That is:\n\n* `F(0) = 0`, `F(1) = 1`\n\n* `F(n) = F(n - 1) + F(n - 2)`, for `n > 1`\n\nGiven `n`, calculate `F(n)`.', + topics: ['Recursion', 'Algorithms'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/fibonacci-number/', + }, + { + id: 6, + title: 'Implement Stack using Queues', + description: + 'Implement a last-in-first-out (LIFO) stack using only two queues. 
The implemented stack should support all the functions of a normal stack (push, top, pop, and empty).', + topics: ['Data Structures'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/implement-stack-using-queues/', + }, + { + id: 7, + title: 'Combine Two Tables', + description: + 'Given table `Person` with columns `personId`, `lastName`, and `firstName`, and table `Address` with columns `addressId`, `personId`, `city`, and `state`, write a solution to report the first name, last name, city, and state of each person in the `Person` table. If the address of a `personId` is not present in the `Address` table, report `null` instead.', + topics: ['Databases'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/combine-two-tables/', + }, + { + id: 8, + title: 'Repeated DNA Sequences', + description: + 'Given a string `s` that represents a DNA sequence, return all the 10-letter-long sequences (substrings) that occur more than once in a DNA molecule. You may return the answer in any order.', + topics: ['Algorithms', 'Bit Manipulation'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/repeated-dna-sequences/', + }, + { + id: 9, + title: 'Course Schedule', + description: + 'There are a total of `numCourses` courses you have to take, labeled from 0 to `numCourses - 1`. You are given an array `prerequisites` where `prerequisites[i] = [ai, bi]` indicates that you must take course `bi` first if you want to take course `ai`. Return true if you can finish all courses. 
Otherwise, return false.', + topics: ['Data Structures', 'Algorithms'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/course-schedule/', + }, + { + id: 10, + title: 'LRU Cache Design', + description: 'Design and implement an LRU (Least Recently Used) cache.', + topics: ['Data Structures'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/lru-cache/', + }, + { + id: 11, + title: 'Longest Common Subsequence', + description: + 'Given two strings `text1` and `text2`, return the length of their longest common subsequence. If there is no common subsequence, return 0.\n\nA subsequence of a string is a new string generated from the original string with some characters (can be none) deleted without changing the relative order of the remaining characters.\n\nFor example, "ace" is a subsequence of "abcde". A common subsequence of two strings is a subsequence that is common to both strings.', + topics: ['Strings', 'Algorithms'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/longest-common-subsequence/', + }, + { + id: 12, + title: 'Rotate Image', + description: + 'You are given an `n x n` 2D matrix representing an image, rotate the image by 90 degrees (clockwise).', + topics: ['Arrays', 'Algorithms'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/rotate-image/', + }, + { + id: 13, + title: 'Airplane Seat Assignment Probability', + description: + 'n passengers board an airplane with exactly n seats. The first passenger has lost the ticket and picks a seat randomly. 
After that, the rest of the passengers will:\n\n- Take their own seat if it is still available\n- Pick other seats randomly when they find their seat occupied\n\nReturn the probability that the nth person gets their own seat.', + topics: ['Brainteaser'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/airplane-seat-assignment-probability/', + }, + { + id: 14, + title: 'Validate Binary Search Tree', + description: + 'Given the root of a binary tree, determine if it is a valid binary search tree (BST).', + topics: ['Data Structures', 'Algorithms'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/validate-binary-search-tree/', + }, + { + id: 15, + title: 'Sliding Window Maximum', + description: + 'You are given an array of integers `nums`. There is a sliding window of size `k` which is moving from the very left of the array to the very right. You can only see the `k` numbers in the window. Each time the sliding window moves right by one position.\n\nReturn the max sliding window.', + topics: ['Arrays', 'Algorithms'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/sliding-window-maximum/', + }, + { + id: 16, + title: 'N-Queen Problem', + description: + "The n-queens puzzle is the problem of placing n queens on an `n x n` chessboard such that no two queens attack each other.\n\nGiven an integer `n`, return all distinct solutions to the n-queens puzzle. You may return the answer in any order.\n\nEach solution contains a distinct board configuration of the n-queens' placement, where 'Q' and '.' 
both indicate a queen and an empty space, respectively.", + topics: ['Algorithms'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/n-queens/', + }, + { + id: 17, + title: 'Serialize and Deserialize a Binary Tree', + description: + 'Serialization is the process of converting a data structure or object into a sequence of bits so that it can be stored in a file or memory buffer or transmitted across a network connection link to be reconstructed later in the same or another computer environment.\n\nDesign an algorithm to serialize and deserialize a binary tree. There is no restriction on how your serialization/deserialization algorithm should work. You just need to ensure that a binary tree can be serialized to a string and this string can be deserialized to the original tree structure.', + topics: ['Data Structures', 'Algorithms'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/serialize-and-deserialize-binary-tree/', + }, + { + id: 18, + title: 'Wildcard Matching', + description: + "Given an input string `s` and a pattern `p`, implement wildcard pattern matching with support for '?' and '*' where:\n\n- '?' Matches any single character\n- '*' Matches any sequence of characters (including the empty sequence)\n\nThe matching should cover the entire input string (not partial).", + topics: ['Strings', 'Algorithms'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/wildcard-matching/', + }, + { + id: 19, + title: 'Chalkboard XOR Game', + description: + 'You are given an array of integers `nums` representing the numbers written on a chalkboard. Alice and Bob take turns erasing exactly one number from the chalkboard, with Alice starting first. If erasing a number causes the bitwise XOR of all the elements of the chalkboard to become 0, then that player loses. 
The bitwise XOR of one element is that element itself, and the bitwise XOR of no elements is 0.\n\nAlso, if any player starts their turn with the bitwise XOR of all the elements of the chalkboard equal to 0, then that player wins.\n\nReturn `true` if and only if Alice wins the game, assuming both players play optimally.', + topics: ['Brainteaser'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/chalkboard-xor-game/', + }, + { + id: 20, + title: 'Trips and Users', + description: + "Given table `Trips` with columns `id`, `client_id`, `driver_id`, `city_id`, `status`, and `request_at`, where `id` is the primary key. The table holds all taxi trips. Each trip has a unique `id`, while `client_id` and `driver_id` are foreign keys to the `users_id` in the `Users` table.\n\nStatus is an `ENUM` (category) type of (`'completed'`, `'cancelled_by_driver'`, `'cancelled_by_client'`).\n\nGiven table `Users` with columns `users_id`, `banned`, and `role`, `users_id` is the primary key (column with unique values) for this table. The table holds all users. Each user has a unique `users_id` and `role` is an `ENUM` type of (`'client'`, `'driver'`, `'partner'`). `banned` is an `ENUM` category of type (`'Yes'`, `'No'`). The cancellation rate is computed by dividing the number of canceled (by client or driver) requests with unbanned users by the total number of requests with unbanned users on that day.\n\nWrite a solution to find the cancellation rate of requests with unbanned users (both client and driver must not be banned) each day between `\"2013-10-01\"` and `\"2013-10-03\"`. 
Round the cancellation rate to two decimal points.", + topics: ['Databases'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/trips-and-users/', + }, +]; + +interface Question { + title: string; + difficulty: string; + topic: Array; + description: string; +} + +export const questionData: Array = questionDetails.map((question) => ({ + title: question.title, + description: question.description, + difficulty: question.difficulty as 'Easy' | 'Medium' | 'Hard', + topic: question.topics, +})); diff --git a/backend/question/src/lib/db/schema.ts b/backend/question/src/lib/db/schema.ts new file mode 100644 index 0000000000..2ac90ef882 --- /dev/null +++ b/backend/question/src/lib/db/schema.ts @@ -0,0 +1,49 @@ +import { + integer, + pgEnum, + pgTable, + serial, + text, + timestamp, + uniqueIndex, + uuid, + varchar, +} from 'drizzle-orm/pg-core'; + +export const questions = pgTable('questions', { + id: serial('id').primaryKey(), + title: varchar('title', { length: 255 }).notNull(), + difficulty: varchar('difficulty', { length: 50 }).notNull(), + topic: varchar('topic', { length: 255 }).array().notNull(), + description: text('description').notNull(), + createdAt: timestamp('created_at', { precision: 6, withTimezone: true }).defaultNow(), + updatedAt: timestamp('updated_at', { precision: 6, withTimezone: true }).defaultNow(), +}); + +export const questionAttempts = pgTable( + 'question_attempts', + { + attemptId: serial('attempt_id').primaryKey(), + questionId: integer('question_id').notNull(), + userId1: uuid('user_id_1').notNull(), + userId2: uuid('user_id_2'), // Nullable if only one user is involved + code: text('code').notNull(), + timestamp: timestamp('timestamp', { precision: 6, withTimezone: true }).defaultNow(), + language: varchar('language', { length: 50 }).notNull(), + }, + (questionAttempt) => ({ + uniqueUsersAttempt: uniqueIndex('unique_users_attempt').on( + questionAttempt.questionId, + questionAttempt.userId1, + questionAttempt.userId2 + ), + }) 
+); + +export const actionEnum = pgEnum('action', ['SEED']); + +export const admin = pgTable('admin', { + id: uuid('id').primaryKey().notNull().defaultRandom(), + createdAt: timestamp('created_at').defaultNow(), + action: actionEnum('action').notNull(), +}); diff --git a/backend/question/src/lib/db/seed.ts b/backend/question/src/lib/db/seed.ts new file mode 100644 index 0000000000..c22aa2530e --- /dev/null +++ b/backend/question/src/lib/db/seed.ts @@ -0,0 +1,55 @@ +import { eq, sql } from 'drizzle-orm'; + +import { admin as adminTable, db, questions as questionTable } from '@/lib/db'; + +import { questionData } from './sample-data/questions'; + +const seedQuestions = async () => { + try { + await db.transaction(async (trx) => { + const seedRecords = await trx.select().from(adminTable).where(eq(adminTable.action, 'SEED')); + + if (seedRecords && seedRecords.length > 0) { + console.info( + `[Questions]: Seeded already at: ${(seedRecords[seedRecords.length - 1].createdAt ?? new Date()).toLocaleString()}` + ); + return; + } + + // Delete all questions (not table) + await trx.delete(questionTable); + + // Reset Serial to start index 1 + await trx.execute(sql` + SELECT setval( + pg_get_serial_sequence('questions', 'id'), + COALESCE(max(id) + 1, 1), + false + ) + FROM questions; + `); + + for (const question of questionData) { + await trx + .insert(questionTable) + .values({ ...question, id: undefined }) // Let DB set ID + .onConflictDoNothing(); + } + + await trx.insert(adminTable).values({ action: 'SEED' }); + }); + } catch (error) { + console.log('[Questions]: Error seeding question data', error); + process.exit(1); + } +}; + +void seedQuestions() + .then(() => { + console.log('[Questions]: Seeding completed successfully.'); + process.exit(0); + }) + .catch((error) => { + console.error('[Questions]: Error during seeding:', error); + process.exit(1); + }); diff --git a/backend/question/src/lib/utils/index.ts b/backend/question/src/lib/utils/index.ts new file mode 100644 
index 0000000000..1ff09efd40 --- /dev/null +++ b/backend/question/src/lib/utils/index.ts @@ -0,0 +1 @@ +export * from './logger'; diff --git a/backend/question/src/lib/utils/logger.ts b/backend/question/src/lib/utils/logger.ts new file mode 100644 index 0000000000..e41655d003 --- /dev/null +++ b/backend/question/src/lib/utils/logger.ts @@ -0,0 +1,3 @@ +import pinoLogger from 'pino'; + +export const logger = pinoLogger(); diff --git a/backend/question/src/lib/uuid.ts b/backend/question/src/lib/uuid.ts new file mode 100644 index 0000000000..4d4a6b3b03 --- /dev/null +++ b/backend/question/src/lib/uuid.ts @@ -0,0 +1,5 @@ +import { validate } from 'uuid'; + +export const isValidUUID = (uuid: string) => { + return validate(uuid); +}; diff --git a/backend/question/src/routes/question.ts b/backend/question/src/routes/question.ts new file mode 100644 index 0000000000..a87e05ff94 --- /dev/null +++ b/backend/question/src/routes/question.ts @@ -0,0 +1,39 @@ +import { Router } from 'express'; + +import { createAttempt, getAttempts } from '@/controller/attempted-controller'; +import { + createQuestion, + deleteQuestion, + getDifficulties, + getQuestionDetails, + getQuestions, + getTopics, + searchQuestionsByTitle, + updateQuestion, +} from '@/controller/question-controller'; +import { fetchRandomQuestionByIncreasingAttemptCount } from '@/controller/unattempted-controller'; + +const router = Router(); + +router.get('/search', searchQuestionsByTitle); + +router.get('/topics', getTopics); +router.get('/difficulties', getDifficulties); + +router.get('/', getQuestions); + +router.get('/:questionId', getQuestionDetails); + +router.post('/random', fetchRandomQuestionByIncreasingAttemptCount); + +router.post('/attempts', getAttempts); +router.post('/newAttempt', createAttempt); + +// ====================================== +// CRUD +// ====================================== +router.post('/create', createQuestion); +router.put('/:questionId', updateQuestion); 
+router.delete('/:questionId', deleteQuestion); + +export default router; diff --git a/backend/question/src/server.ts b/backend/question/src/server.ts new file mode 100644 index 0000000000..648b33a90c --- /dev/null +++ b/backend/question/src/server.ts @@ -0,0 +1,63 @@ +import { exit } from 'process'; + +import cors from 'cors'; +import { sql } from 'drizzle-orm'; +import express, { json } from 'express'; +import helmet from 'helmet'; +import { StatusCodes } from 'http-status-codes'; +import pino from 'pino-http'; + +import { LOAD_TEST_POD, UI_HOST } from '@/config'; +import { config, db } from '@/lib/db'; +import { logger } from '@/lib/utils'; +import questionsRouter from '@/routes/question'; + +const app = express(); +app.use( + pino({ + serializers: { + req: ({ id, method, url, headers: { host, referer }, query, params }) => ({ + id, + method, + url, + headers: { host, referer }, + query, + params, + }), + res: ({ statusCode }) => ({ statusCode }), + }, + }) +); +app.use(json()); +app.use(helmet()); +app.use( + cors({ + origin: [UI_HOST, LOAD_TEST_POD], + credentials: true, + }) +); + +app.use('/questions', questionsRouter); + +// Health Check for Docker +app.get('/health', (_req, res) => res.status(StatusCodes.OK).send('OK')); + +export const dbHealthCheck = async () => { + try { + await db.execute(sql`SELECT 1`); + logger.info('Connected to DB'); + } catch (error) { + const { message } = error as Error; + logger.error('Cannot connect to DB: ' + message); + logger.error(`DB Config: ${JSON.stringify({ ...config, password: '' })}`); + exit(1); + } +}; + +// Ensure DB service is up before running. 
+app.get('/test-db', async (_req, res) => { + await dbHealthCheck(); + res.json({ message: 'OK ' }); +}); + +export default app; diff --git a/backend/question/src/services/get/get-attempts.ts b/backend/question/src/services/get/get-attempts.ts new file mode 100644 index 0000000000..1ee4d35bf1 --- /dev/null +++ b/backend/question/src/services/get/get-attempts.ts @@ -0,0 +1,29 @@ +import { and, desc, eq, or } from 'drizzle-orm'; + +import { db, questionAttempts as QUESTION_ATTEMPTS_TABLE } from '@/lib/db'; + +type Params = { + questionId: number; + userId: string; + limit?: number; + offset?: number; +}; + +export const getQuestionAttempts = async ({ questionId, userId, limit = 10, offset }: Params) => { + if (limit < 1) { + limit = 1; + } + + const userIdFilters = [ + eq(QUESTION_ATTEMPTS_TABLE.userId1, userId), + eq(QUESTION_ATTEMPTS_TABLE.userId2, userId), + ]; + const filterClauses = [eq(QUESTION_ATTEMPTS_TABLE.questionId, questionId), or(...userIdFilters)]; + return await db + .select() + .from(QUESTION_ATTEMPTS_TABLE) + .where(and(...filterClauses)) + .orderBy(desc(QUESTION_ATTEMPTS_TABLE.timestamp)) + .offset(offset ?? 0) + .limit(limit); +}; diff --git a/backend/question/src/services/get/get-random-question.ts b/backend/question/src/services/get/get-random-question.ts new file mode 100644 index 0000000000..b5b0dbce9a --- /dev/null +++ b/backend/question/src/services/get/get-random-question.ts @@ -0,0 +1,208 @@ +import { + and, + arrayOverlaps, + asc, + eq, + getTableColumns, + inArray, + InferSelectModel, + isNull, + or, + sql, +} from 'drizzle-orm'; + +import { db } from '@/lib/db/index'; +import { + questionAttempts as QUESTION_ATTEMPTS_TABLE, + questions as QUESTIONS_TABLE, +} from '@/lib/db/schema'; +import { logger } from '@/lib/utils'; + +/** + * Both userIds specified (they are matches after all) + * 1.1. 
Both Unattempted + * + * SELECT q.* + * FROM + * questions q + * LEFT JOIN + * question_attempts qa + * ON + * q.id = qa.question_id + * AND ( + * qa.user_id_1 IN (userId1, userId2) + * OR qa.user_id_2 IN (userId1, userId2) + * ) + * WHERE + * qa.question_id IS NULL + * AND q.topic && topic + * AND q.difficulty = difficulty + * ORDER BY RANDOM() + * LIMIT 1; + * + * 1.2. + * - Get topic/difficulty for both + * - Pick one with least attempts + * WITH "at" AS ( + * SELECT + * q.*, + * SUM( + * CASE WHEN + * qa.user_id_1 = $userId1 + * OR qa.user_id_2 = $userId1 THEN 1 END + * ) AS user1_attempts, + * SUM( + * CASE WHEN + * qa.user_id_1 = $userId2 + * OR qa.user_id_2 = $userId2 THEN 1 END + * ) AS user2_attempts + * FROM + * questions q + * JOIN question_attempts qa ON q.id = qa.question_id + * AND ( + * qa.user_id_1 IN ($userId1, $userId2) + * OR qa.user_id_2 IN ($userId1, $userId2) + * ) + * WHERE + * q.topic::text[] && $topic + * AND q.difficulty = $difficulty + * GROUP BY + * q.id + * ) + * SELECT + * * + * FROM + * "at" + * ORDER BY + * ( + * COALESCE("at".user1_attempts, 0) + COALESCE("at".user2_attempts, 0) + * ) ASC + * LIMIT 1 + */ + +type Params = { + userId1: string; + userId2: string; + topics?: Array; + difficulty?: string; +}; + +type IGetRandomQuestionResponse = InferSelectModel & { + attemptCount: number; +}; + +// Fetch an unattempted question or fallback to the least attempted one +export const getRandomQuestion = async ({ + userId1, + userId2, + topics, + difficulty, +}: Params): Promise => { + // If an attempt contains either user's ID + const ids = [userId1, userId2]; + const userIdClause = [ + inArray(QUESTION_ATTEMPTS_TABLE.userId1, ids), + inArray(QUESTION_ATTEMPTS_TABLE.userId2, ids), + ]; + // Join both tables on qId equality, filtering only rows with either user's ID + const joinClause = [ + eq(QUESTIONS_TABLE.id, QUESTION_ATTEMPTS_TABLE.questionId), + or(...userIdClause), + ]; + + // Try different filter combinations in order of specificity 
+ const filterCombinations = [ + // Exact match + topics && difficulty + ? [arrayOverlaps(QUESTIONS_TABLE.topic, topics), eq(QUESTIONS_TABLE.difficulty, difficulty)] + : // Topic only + topics + ? [arrayOverlaps(QUESTIONS_TABLE.topic, topics)] + : // Difficulty only + difficulty + ? [eq(QUESTIONS_TABLE.difficulty, difficulty)] + : // No filters + [], + ]; + + // Additional combinations if both topic and difficulty are provided + if (topics && difficulty) { + filterCombinations.push( + // Topic only + [arrayOverlaps(QUESTIONS_TABLE.topic, topics)], + // Difficulty only + [eq(QUESTIONS_TABLE.difficulty, difficulty)], + // No filters + [] + ); + } + + for (const filterClause of filterCombinations) { + // Check if AT LEAST 1 question exists with current filters + const questionCounts = await db + .select({ id: QUESTIONS_TABLE.id }) + .from(QUESTIONS_TABLE) + .where(and(...filterClause)) + .limit(1); + + // No questions exist with the filter. + if (!questionCounts || !questionCounts.length) { + continue; + } + + // Try to find an unattempted question with current filters + const bothUnattempted = await db + .select({ question: QUESTIONS_TABLE }) + .from(QUESTIONS_TABLE) + .leftJoin(QUESTION_ATTEMPTS_TABLE, and(...joinClause)) + .where(and(isNull(QUESTION_ATTEMPTS_TABLE.attemptId), ...filterClause)) + .orderBy(sql`RANDOM()`) + .limit(1); + + if (bothUnattempted && bothUnattempted.length > 0) { + return { ...bothUnattempted[0].question, attemptCount: 0 }; + } + + // If no unattempted question, try least attempted + let nestedQuery = db + .select({ + ...getTableColumns(QUESTIONS_TABLE), + user1Count: + sql`SUM(CASE WHEN ${QUESTION_ATTEMPTS_TABLE.userId1} = ${userId1}::uuid OR ${QUESTION_ATTEMPTS_TABLE.userId2} = ${userId1}::uuid THEN 1 END)`.as( + 'user1_attempts' + ), + user2Count: + sql`SUM(CASE WHEN ${QUESTION_ATTEMPTS_TABLE.userId1} = ${userId2}::uuid OR ${QUESTION_ATTEMPTS_TABLE.userId2} = ${userId2}::uuid THEN 1 END)`.as( + 'user2_attempts' + ), + }) + 
.from(QUESTIONS_TABLE) + .innerJoin(QUESTION_ATTEMPTS_TABLE, and(...joinClause)) + .$dynamic(); + + if (filterClause.length) { + nestedQuery = nestedQuery.where(and(...filterClause)); + } + + nestedQuery = nestedQuery.groupBy(QUESTIONS_TABLE.id); + + const attempts = db.$with('at').as(nestedQuery); + + const result = await db + .with(attempts) + .select() + .from(attempts) + .orderBy(asc(sql`COALESCE(user1_attempts,0) + COALESCE(user2_attempts,0)`)) + .limit(1); + + if (result && result.length > 0) { + const { user1Count, user2Count, ...details } = result[0]; + const attemptCount = + (user1Count ? (user1Count as number) : 0) + (user2Count ? (user2Count as number) : 0); + return { ...details, attemptCount }; + } + } + + logger.error('No questions found with any filter combination'); + return null; +}; diff --git a/backend/question/src/services/get/index.ts b/backend/question/src/services/get/index.ts new file mode 100644 index 0000000000..72534f3303 --- /dev/null +++ b/backend/question/src/services/get/index.ts @@ -0,0 +1,195 @@ +import { and, arrayOverlaps, eq, getTableColumns, ilike, or, sql } from 'drizzle-orm'; +import { StatusCodes } from 'http-status-codes'; + +import { db } from '@/lib/db/index'; +import { questionAttempts, questions } from '@/lib/db/schema'; + +import type { + IGetDifficultiesResponse, + IGetQuestionPayload, + IGetQuestionResponse, + IGetQuestionsPayload, + IGetQuestionsResponse, + IGetTopicsResponse, +} from './types'; + +export const getQuestionsService = async ( + payload: IGetQuestionsPayload +): Promise => { + const { questionName, difficulty, topic, pageNum = 0, recordsPerPage = 20, userId } = payload; + const offset = pageNum * recordsPerPage; + + const whereClause = []; + + if (questionName) { + whereClause.push(ilike(questions.title, `%${questionName}%`)); + } + + if (difficulty) { + whereClause.push(eq(questions.difficulty, difficulty)); + } + + if (topic && topic.length > 0) { + whereClause.push(arrayOverlaps(questions.topic, 
topic)); + } + + const query = db + .select({ + ...getTableColumns(questions), + attempted: sql`COALESCE(COUNT(${questionAttempts.attemptId}), 0)`.as('attempted'), + }) + .from(questions) + .leftJoin( + questionAttempts, + and( + eq(questionAttempts.questionId, questions.id), + or(eq(questionAttempts.userId1, userId), eq(questionAttempts.userId2, userId)) + ) + ) + .where(and(...whereClause)) + .groupBy(questions.id) + .limit(recordsPerPage) + .offset(offset) + .orderBy(questions.id); + + const [results, totalCount] = await Promise.all([ + query, + db + .select({ count: questions.id }) + .from(questions) + .where(and(...whereClause)) + .then((res) => res.length), + ]); + + return { + code: StatusCodes.OK, + data: { + questions: results.map((q) => ({ + id: q.id, + title: q.title, + difficulty: q.difficulty, + topic: q.topic, + attempted: (q.attempted as number) > 0, + })), + totalQuestions: totalCount, + }, + }; +}; + +export const getQuestionDetailsService = async ( + payload: IGetQuestionPayload +): Promise => { + const { questionId } = payload; + + const result = await db + .select() + .from(questions) + .where(sql`${questions.id} = ${questionId}`) + .limit(1); + + if (result.length === 0) { + return { + code: StatusCodes.NOT_FOUND, + data: { question: null }, + error: { + message: 'Question not found', + }, + }; + } + + return { + code: StatusCodes.OK, + data: { question: result[0] }, + }; +}; + +export const searchQuestionsByTitleService = async ( + title: string, + page: number, + limit: number +): Promise => { + const searchPattern = `%${title}%`; + const effectivePage = page ?? 1; + const effectiveLimit = limit ?? 
10; + const offset = (effectivePage - 1) * effectiveLimit; + + // Query the database for questions matching the title + const results = await db + .select({ + id: questions.id, + title: questions.title, + difficulty: questions.difficulty, + topic: questions.topic, + }) + .from(questions) + .where(sql`${questions.title} ILIKE ${searchPattern}`) // Use ILIKE for case-insensitive matching + .limit(effectiveLimit) + .offset(offset); + + // Return the results as per IGetQuestionsResponse format + return { + code: StatusCodes.OK, + data: { + questions: results, // Directly returning the query results + totalQuestions: results.length, // Count of questions returned + }, + }; +}; + +export const getTopicsService = async (): Promise => { + const results = await db + .select({ + topic: questions.topic, + }) + .from(questions); + + // If no questions are found, return a NOT_FOUND response + if (results.length === 0) { + return { + code: StatusCodes.NOT_FOUND, + data: { topics: [] }, + error: { + message: 'No topics found', + }, + }; + } + + const allTopics = results.flatMap((result) => result.topic); + const uniqueTopics = Array.from(new Set(allTopics)); + + return { + code: StatusCodes.OK, + data: { + topics: uniqueTopics, + }, + }; +}; + +export const getDifficultiesService = async (): Promise => { + const results = await db.selectDistinct({ difficulty: questions.difficulty }).from(questions); + + // If no difficulties are found, return a NOT_FOUND response + if (results.length === 0) { + return { + code: StatusCodes.NOT_FOUND, + data: { difficulties: [] }, + error: { + message: 'No difficulties found', + }, + }; + } + + const uniqueDifficulties = results + .map((result) => result.difficulty) + .sort((a, b) => { + if (a === 'Hard' || b === 'Easy') return 1; + if (b === 'Hard' || a === 'Easy') return -1; + return 0; + }); + return { + code: StatusCodes.OK, + data: { + difficulties: uniqueDifficulties, + }, + }; +}; diff --git a/backend/question/src/services/get/types.ts 
b/backend/question/src/services/get/types.ts new file mode 100644 index 0000000000..67c920843b --- /dev/null +++ b/backend/question/src/services/get/types.ts @@ -0,0 +1,54 @@ +import type { IServiceResponse } from '@/types'; + +//============================================================================= +// /get +//============================================================================= +export type IGetQuestionsPayload = { + // Filters + userId: string; + questionName?: string; + difficulty?: string; + topic?: Array; + // Pagination + pageNum?: number; // Default 0 + recordsPerPage?: number; // Default 20 +}; + +export type IGetQuestionsResponse = IServiceResponse<{ + questions: Array<{ + id: number; // question's unique identifier or number + title: string; // name or title of the question + difficulty: string; // difficulty level (e.g., 'easy', 'medium', 'hard') + topic: Array; // array of topics the question belongs to + attempted?: boolean; // whether the user has attempted this question + }>; + totalQuestions: number; // total number of questions matching the query +}>; + +export type IGetTopicsResponse = IServiceResponse<{ + topics: Array; +}>; + +export type IGetDifficultiesResponse = IServiceResponse<{ + difficulties: Array; +}>; + +//============================================================================= +// /details +//============================================================================= +export type IGetQuestionPayload = { + questionId: number; +}; + +export type IGetQuestionResponse = IServiceResponse<{ + question?: { + title: string; // name or title of the question + description: string; // question description + difficulty: string; // difficulty level (e.g., 'easy', 'medium', 'hard') + topic: Array; // array of topics the question belongs to + } | null; +}>; + +//============================================================================= +// /random (For matching) 
+//============================================================================= diff --git a/backend/question/src/services/post/addAttempt.ts b/backend/question/src/services/post/addAttempt.ts new file mode 100644 index 0000000000..9cfd2d802f --- /dev/null +++ b/backend/question/src/services/post/addAttempt.ts @@ -0,0 +1,22 @@ +import { db } from '@/lib/db/index'; +import { questionAttempts } from '@/lib/db/schema'; + +// Define the data structure for an attempt +interface AttemptData { + questionId: number; + userId1: string; + userId2?: string; + code: string; + language: string; +} + +// Function to add an attempt to the database +export const addAttempt = async (attemptData: AttemptData) => { + return await db.insert(questionAttempts).values({ + questionId: attemptData.questionId, + userId1: attemptData.userId1, + userId2: attemptData.userId2, + code: attemptData.code, + language: attemptData.language, + }); +}; diff --git a/backend/question/src/services/post/index.ts b/backend/question/src/services/post/index.ts new file mode 100644 index 0000000000..45a6438e0d --- /dev/null +++ b/backend/question/src/services/post/index.ts @@ -0,0 +1,70 @@ +import { eq } from 'drizzle-orm'; + +import { db } from '@/lib/db/index'; +import { questions } from '@/lib/db/schema'; + +import { ICreateQuestionPayload, IDeleteQuestionPayload, IUpdateQuestionPayload } from './types'; + +export const createQuestionService = async (payload: ICreateQuestionPayload) => { + try { + const [newQuestion] = await db + .insert(questions) + .values({ + title: payload.title, + description: payload.description, + difficulty: payload.difficulty, + topic: payload.topics.map(String), + }) + .returning(); + + return { success: true, code: 201, data: newQuestion }; + } catch (error) { + console.error('Error creating question:', error); + return { success: false, code: 500, message: 'Failed to create question' }; + } +}; + +export const updateQuestionService = async (payload: IUpdateQuestionPayload) => 
{ + try { + const updateSet: Partial = {}; + + if (payload.title !== undefined) updateSet.title = payload.title; + if (payload.description !== undefined) updateSet.description = payload.description; + if (payload.difficulty !== undefined) updateSet.difficulty = payload.difficulty; + if (payload.topics !== undefined && Array.isArray(payload.topics)) + updateSet.topic = payload.topics.map(String); + + const [updatedQuestion] = await db + .update(questions) + .set(updateSet) + .where(eq(questions.id, Number(payload.id))) + .returning(); + + if (!updatedQuestion) { + return { success: false, code: 404, message: 'Question not found' }; + } + + return { success: true, code: 200, data: updatedQuestion }; + } catch (error) { + console.error('Error updating question:', error); + return { success: false, code: 500, message: 'Failed to update question' }; + } +}; + +export const deleteQuestionService = async (payload: IDeleteQuestionPayload) => { + try { + const [deletedQuestion] = await db + .delete(questions) + .where(eq(questions.id, payload.id)) + .returning(); + + if (!deletedQuestion) { + return { success: false, code: 404, message: 'Question not found' }; + } + + return { success: true, code: 200, message: 'Question deleted successfully' }; + } catch (error) { + console.error('Error deleting question:', error); + return { success: false, code: 500, message: 'Failed to delete question' }; + } +}; diff --git a/backend/question/src/services/post/types.ts b/backend/question/src/services/post/types.ts new file mode 100644 index 0000000000..2ab4bfd4d6 --- /dev/null +++ b/backend/question/src/services/post/types.ts @@ -0,0 +1,24 @@ +export interface ICreateQuestionPayload { + title: string; + description: string; + difficulty: string; + topics: Array; +} + +export interface IUpdateQuestionPayload extends ICreateQuestionPayload { + id: number; +} + +export interface IDeleteQuestionPayload { + id: number; +} + +export interface Question { + id: number; + title: string; + 
difficulty: string; + topic: Array; + description: string; + createdAt: Date; + updatedAt: Date; +} diff --git a/backend/question/src/types/index.ts b/backend/question/src/types/index.ts new file mode 100644 index 0000000000..6738aa624a --- /dev/null +++ b/backend/question/src/types/index.ts @@ -0,0 +1 @@ +export * from './utility'; diff --git a/backend/question/src/types/utility.ts b/backend/question/src/types/utility.ts new file mode 100644 index 0000000000..a07fa1295a --- /dev/null +++ b/backend/question/src/types/utility.ts @@ -0,0 +1,9 @@ +import type { StatusCodes } from 'http-status-codes'; + +export type IServiceResponse = { + code: StatusCodes; + error?: { + message: string; + }; + data?: T; +}; diff --git a/backend/question/tsconfig.json b/backend/question/tsconfig.json new file mode 100644 index 0000000000..c18b7e3c96 --- /dev/null +++ b/backend/question/tsconfig.json @@ -0,0 +1,110 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + /* Language and Environment */ + "target": "ES2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. 
*/ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + /* Modules */ + "module": "commonjs", /* Specify what module code is generated. */ + "rootDir": "./src", /* Specify the root folder within your source files. */ + // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ + "baseUrl": ".", /* Specify the base directory to resolve non-relative module names. */ + "paths": { + "@/*": ["./src/*"] + }, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. 
*/ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. 
*/ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./dist", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. 
*/ + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. 
*/ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ + }, + "exclude": [ + "drizzle.*.*ts" + ], + "ts-node": { + "swc": true, + "require": ["tsconfig-paths/register"] + } +} \ No newline at end of file diff --git a/backend/user/.dockerignore b/backend/user/.dockerignore new file mode 100644 index 0000000000..bcfa047798 --- /dev/null +++ b/backend/user/.dockerignore @@ -0,0 +1,4 @@ +node_modules +dist/ +.git +.env* \ No newline at end of file diff --git a/backend/user/.env.compose b/backend/user/.env.compose new file mode 100644 index 0000000000..73e379a86b --- /dev/null +++ b/backend/user/.env.compose @@ -0,0 +1,12 @@ +# To be injected by container +# PEERPREP_UI_HOST= + +EXPRESS_PORT=9001 +EXPRESS_DB_HOST="user-db" +EXPRESS_DB_PORT=5432 +POSTGRES_DB="user" +POSTGRES_USER="peerprep-user-express" +POSTGRES_PASSWORD="69/X8JxtAVsM+0YHT4RR5D7Ahf7bTobI4EED64FrzIU=" +PGDATA="/data/user-db" + +EXPRESS_JWT_SECRET_KEY="jd+9qlXA0a3YsmVf2KJgyiJ3SprIR318IAwhRXck4Y8=" diff --git a/backend/user/.env.docker b/backend/user/.env.docker new file mode 100644 index 0000000000..98271cbc19 --- /dev/null +++ b/backend/user/.env.docker @@ -0,0 +1,14 @@ +PEERPREP_UI_HOST=http://host.docker.internal:5173 + +EXPRESS_PORT=9001 + +# When run with standalone build +EXPRESS_DB_HOST=host.docker.internal + +EXPRESS_DB_PORT=5431 +POSTGRES_DB=user +POSTGRES_USER=peerprep-user-express +POSTGRES_PASSWORD=69/X8JxtAVsM+0YHT4RR5D7Ahf7bTobI4EED64FrzIU= +PGDATA=/data/user-db + +EXPRESS_JWT_SECRET_KEY=jd+9qlXA0a3YsmVf2KJgyiJ3SprIR318IAwhRXck4Y8= diff --git a/backend/user/.env.local b/backend/user/.env.local new file mode 100644 index 0000000000..f4694ae54d --- /dev/null +++ b/backend/user/.env.local @@ -0,0 +1,11 @@ +PEERPREP_UI_HOST="http://localhost:5173" + +EXPRESS_PORT=9001 +EXPRESS_DB_HOST="localhost" +EXPRESS_DB_PORT=5431 +POSTGRES_DB="user" +POSTGRES_USER="peerprep-user-express" +POSTGRES_PASSWORD="69/X8JxtAVsM+0YHT4RR5D7Ahf7bTobI4EED64FrzIU=" +PGDATA="/data/user-db" + +EXPRESS_JWT_SECRET_KEY="jd+9qlXA0a3YsmVf2KJgyiJ3SprIR318IAwhRXck4Y8=" diff --git 
a/backend/user/README.md b/backend/user/README.md new file mode 100644 index 0000000000..e09ac20065 --- /dev/null +++ b/backend/user/README.md @@ -0,0 +1,28 @@ +# User Service + +## Running with Docker (Standalone) + +1. Run this command to build: + ```sh + docker build \ + -t user-express-local \ + --build-arg port=9001 \ + -f express.Dockerfile . + ``` +2. Run this command, from the root folder: + ```sh + make db-up + ``` + +3. Run the necessary migrate and seed commands, if you haven't yet. + +4. Run this command to expose the container: + ```sh + docker run -p 9001:9001 --env-file ./.env.docker user-express-local + ``` + +## Running with Docker-Compose (Main config) + +Edit the variables in the `.env.compose` file and run `make up` from the root folder. + +Any startup instructions will be run from `entrypoint.sh` instead. diff --git a/backend/user/drizzle.config.ts b/backend/user/drizzle.config.ts new file mode 100644 index 0000000000..b95650e9d9 --- /dev/null +++ b/backend/user/drizzle.config.ts @@ -0,0 +1,16 @@ +import { defineConfig } from 'drizzle-kit'; + +const config = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB!, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; + +export default defineConfig({ + schema: './src/lib/db/schema.ts', + out: './drizzle', + dialect: 'postgresql', + dbCredentials: config, +}); diff --git a/backend/user/drizzle/0000_initial_schema.sql b/backend/user/drizzle/0000_initial_schema.sql new file mode 100644 index 0000000000..facb4ddc60 --- /dev/null +++ b/backend/user/drizzle/0000_initial_schema.sql @@ -0,0 +1,25 @@ +DO $$ BEGIN + CREATE TYPE "public"."action" AS ENUM('SEED'); +EXCEPTION + WHEN duplicate_object THEN null; +END $$; +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS "admin" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "created_at" timestamp DEFAULT now(), + "action" "action" NOT 
NULL +); +--> statement-breakpoint +CREATE TABLE IF NOT EXISTS "users" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "email" varchar(255) NOT NULL, + "username" varchar(255) NOT NULL, + "first_name" varchar(255) NOT NULL, + "last_name" varchar(255) NOT NULL, + "password" varchar(255) NOT NULL, + "failed_attempts" smallint DEFAULT 0, + "unlock_time" timestamp (6) with time zone, + "attempted_questions" integer[], + CONSTRAINT "users_email_unique" UNIQUE("email"), + CONSTRAINT "users_username_unique" UNIQUE("username") +); diff --git a/backend/user/drizzle/0001_add_admin_user.sql b/backend/user/drizzle/0001_add_admin_user.sql new file mode 100644 index 0000000000..14989f482a --- /dev/null +++ b/backend/user/drizzle/0001_add_admin_user.sql @@ -0,0 +1 @@ +ALTER TABLE "users" ADD COLUMN "is_admin" boolean DEFAULT false; \ No newline at end of file diff --git a/backend/user/drizzle/meta/0000_snapshot.json b/backend/user/drizzle/meta/0000_snapshot.json new file mode 100644 index 0000000000..c94000a94f --- /dev/null +++ b/backend/user/drizzle/meta/0000_snapshot.json @@ -0,0 +1,136 @@ +{ + "id": "f01e3d91-1038-48b8-a073-a6a9a8a308fd", + "prevId": "00000000-0000-0000-0000-000000000000", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.admin": { + "name": "admin", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "default": "now()" + }, + "action": { + "name": "action", + "type": "action", + "typeSchema": "public", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": 
"gen_random_uuid()" + }, + "email": { + "name": "email", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "username": { + "name": "username", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "first_name": { + "name": "first_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "last_name": { + "name": "last_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "password": { + "name": "password", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "failed_attempts": { + "name": "failed_attempts", + "type": "smallint", + "primaryKey": false, + "notNull": false, + "default": 0 + }, + "unlock_time": { + "name": "unlock_time", + "type": "timestamp (6) with time zone", + "primaryKey": false, + "notNull": false + }, + "attempted_questions": { + "name": "attempted_questions", + "type": "integer[]", + "primaryKey": false, + "notNull": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_email_unique": { + "name": "users_email_unique", + "nullsNotDistinct": false, + "columns": [ + "email" + ] + }, + "users_username_unique": { + "name": "users_username_unique", + "nullsNotDistinct": false, + "columns": [ + "username" + ] + } + } + } + }, + "enums": { + "public.action": { + "name": "action", + "schema": "public", + "values": [ + "SEED" + ] + } + }, + "schemas": {}, + "sequences": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/backend/user/drizzle/meta/0001_snapshot.json b/backend/user/drizzle/meta/0001_snapshot.json new file mode 100644 index 0000000000..1e2c2f42be --- /dev/null +++ b/backend/user/drizzle/meta/0001_snapshot.json @@ -0,0 +1,143 @@ +{ + "id": "5293f5bb-f4d5-43a4-b2bf-f6ebb241dde1", + "prevId": "f01e3d91-1038-48b8-a073-a6a9a8a308fd", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.admin": { 
+ "name": "admin", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "created_at": { + "name": "created_at", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "default": "now()" + }, + "action": { + "name": "action", + "type": "action", + "typeSchema": "public", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {} + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "email": { + "name": "email", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "username": { + "name": "username", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "first_name": { + "name": "first_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "last_name": { + "name": "last_name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "password": { + "name": "password", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "failed_attempts": { + "name": "failed_attempts", + "type": "smallint", + "primaryKey": false, + "notNull": false, + "default": 0 + }, + "unlock_time": { + "name": "unlock_time", + "type": "timestamp (6) with time zone", + "primaryKey": false, + "notNull": false + }, + "attempted_questions": { + "name": "attempted_questions", + "type": "integer[]", + "primaryKey": false, + "notNull": false + }, + "is_admin": { + "name": "is_admin", + "type": "boolean", + "primaryKey": false, + "notNull": false, + "default": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_email_unique": { + "name": "users_email_unique", + "nullsNotDistinct": false, + 
"columns": [ + "email" + ] + }, + "users_username_unique": { + "name": "users_username_unique", + "nullsNotDistinct": false, + "columns": [ + "username" + ] + } + } + } + }, + "enums": { + "public.action": { + "name": "action", + "schema": "public", + "values": [ + "SEED" + ] + } + }, + "schemas": {}, + "sequences": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/backend/user/drizzle/meta/_journal.json b/backend/user/drizzle/meta/_journal.json new file mode 100644 index 0000000000..ed929bac0b --- /dev/null +++ b/backend/user/drizzle/meta/_journal.json @@ -0,0 +1,20 @@ +{ + "version": "7", + "dialect": "postgresql", + "entries": [ + { + "idx": 0, + "version": "7", + "when": 1728143079049, + "tag": "0000_initial_schema", + "breakpoints": true + }, + { + "idx": 1, + "version": "7", + "when": 1730968227580, + "tag": "0001_add_admin_user", + "breakpoints": true + } + ] +} \ No newline at end of file diff --git a/backend/user/entrypoint.sh b/backend/user/entrypoint.sh new file mode 100755 index 0000000000..bb1695f758 --- /dev/null +++ b/backend/user/entrypoint.sh @@ -0,0 +1,13 @@ +#!/bin/sh + +# Drizzle will handle its own logic to remove conflicts +npm run db:prod:migrate + +# Checks admin table and will not seed if data exists +npm run db:prod:seed + +rm -rf drizzle src tsconfig.json + +npm uninstall tsx drizzle-kit + +npm run start diff --git a/backend/user/express.Dockerfile b/backend/user/express.Dockerfile new file mode 100644 index 0000000000..dd566330e7 --- /dev/null +++ b/backend/user/express.Dockerfile @@ -0,0 +1,26 @@ +FROM node:lts-alpine AS build +WORKDIR /data/user-express +COPY package*.json ./ +RUN npm install +ARG env +COPY . . 
+RUN npm run build + +FROM node:lts-alpine AS production +WORKDIR /data/user-express +COPY --from=build /data/user-express/package*.json ./ +COPY --from=build --chown=node:node /data/user-express/dist ./dist + +RUN npm ci --omit=dev +# For migration +RUN npm install tsx drizzle-kit +COPY drizzle ./drizzle +COPY src/lib/db/ ./src/lib/db +COPY src/lib/passwords ./src/lib/passwords +COPY src/config.ts ./src +COPY tsconfig.json . +COPY entrypoint.sh . + +ARG port +EXPOSE ${port} +ENTRYPOINT [ "/bin/sh", "entrypoint.sh" ] \ No newline at end of file diff --git a/backend/user/package.json b/backend/user/package.json new file mode 100644 index 0000000000..f053d155ea --- /dev/null +++ b/backend/user/package.json @@ -0,0 +1,58 @@ +{ + "name": "user", + "version": "1.0.0", + "main": "dist/index.js", + "scripts": { + "dev": "env-cmd -f .env.local nodemon src/index.ts | pino-pretty", + "build": "tsc && tsc-alias", + "start": "node dist/index.js", + "build:local": "env-cmd -f .env.local tsc && tsc-alias", + "start:local": "env-cmd -f .env.local node dist/index.js", + "db:generate": "env-cmd -f .env.local drizzle-kit generate", + "db:migrate": "env-cmd -f .env.local tsx ./src/lib/db/migrate.ts", + "db:seed": "env-cmd -f .env.local tsx ./src/lib/db/seed.ts", + "db:prod:migrate": "tsx ./src/lib/db/migrate.ts", + "db:prod:seed": "tsx ./src/lib/db/seed.ts", + "db:inspect": "env-cmd -f .env.local drizzle-kit studio", + "fmt": "prettier --config .prettierrc src --write", + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "description": "", + "dependencies": { + "bcrypt": "^5.1.1", + "cookie-parser": "^1.4.6", + "cors": "^2.8.5", + "dotenv": "^16.4.5", + "drizzle-orm": "^0.33.0", + "env-cmd": "^10.1.0", + "express": "^4.21.0", + "express-rate-limit": "^7.4.0", + "helmet": "^7.1.0", + "http-status-codes": "^2.3.0", + "jsonwebtoken": "^9.0.2", + "pino": "^9.4.0", + "pino-http": "^10.3.0", + "postgres": "^3.4.4" + }, + 
"devDependencies": { + "@swc/core": "^1.7.26", + "@swc/helpers": "^0.5.13", + "@types/bcrypt": "^5.0.2", + "@types/cookie-parser": "^1.4.7", + "@types/cors": "^2.8.17", + "@types/express": "^4.17.21", + "@types/jsonwebtoken": "^9.0.6", + "@types/node": "^22.5.5", + "drizzle-kit": "^0.24.2", + "nodemon": "^3.1.4", + "pino-pretty": "^11.2.2", + "regenerator-runtime": "^0.14.1", + "ts-node": "^10.9.2", + "tsc-alias": "^1.8.10", + "tsconfig-paths": "^4.2.0", + "tsx": "^4.19.1" + } +} diff --git a/backend/user/src/config.ts b/backend/user/src/config.ts new file mode 100644 index 0000000000..1639d88889 --- /dev/null +++ b/backend/user/src/config.ts @@ -0,0 +1,17 @@ +import 'dotenv/config'; + +export const JWT_SECRET_KEY = process.env.EXPRESS_JWT_SECRET_KEY!; + +export const UI_HOST = process.env.PEERPREP_UI_HOST!; + +export const EXPRESS_PORT = process.env.EXPRESS_PORT; + +export const dbConfig = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB!, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; + +export const LOAD_TEST_POD = process.env.LOAD_TEST_POD || 'http://user-service-load-test'; diff --git a/backend/user/src/controllers/auth-check/index.ts b/backend/user/src/controllers/auth-check/index.ts new file mode 100644 index 0000000000..eccbdabd26 --- /dev/null +++ b/backend/user/src/controllers/auth-check/index.ts @@ -0,0 +1,34 @@ +import { eq } from 'drizzle-orm'; +import { StatusCodes } from 'http-status-codes'; + +import { COOKIE_NAME, decodeCookie, isCookieValid } from '@/lib/cookies'; +import { db, users } from '@/lib/db'; +import { logger } from '@/lib/utils'; +import { IRouteHandler } from '@/types'; + +export const checkIsAuthed: IRouteHandler = async (req, res) => { + const cookie: string | undefined = req.cookies[COOKIE_NAME]; + + if (cookie && isCookieValid(cookie)) { + const decoded = decodeCookie(cookie); + const expireTimeInMillis = decoded.exp 
* 1000; + logger.info( + '[/auth-check/check-is-authed]: Expires At ' + new Date(expireTimeInMillis).toLocaleString() + ); + const user = await db + .select({ name: users.username, isAdmin: users.isAdmin, email: users.email }) + .from(users) + .where(eq(users.id, decoded.id)) + .limit(1); + return res.status(StatusCodes.OK).json({ + message: 'OK', + expiresAt: expireTimeInMillis, + userId: decoded.id, + username: user.length > 0 ? user[0].name : undefined, + email: user.length > 0 ? user[0].email : undefined, + isAdmin: user.length > 0 ? user[0].isAdmin : undefined, + }); + } + + return res.status(StatusCodes.UNAUTHORIZED).json('Unauthorised'); +}; diff --git a/backend/user/src/controllers/auth/index.ts b/backend/user/src/controllers/auth/index.ts new file mode 100644 index 0000000000..017ccdd3c5 --- /dev/null +++ b/backend/user/src/controllers/auth/index.ts @@ -0,0 +1,113 @@ +import { StatusCodes } from 'http-status-codes'; + +import { COOKIE_NAME } from '@/lib/cookies'; +import { + checkEmailValidService, + checkUsernameValidService, + type IEmailValidPayload, + type ILoginPayload, + type IRegisterPayload, + type IUsernameValidPayload, + loginService, + registerService, +} from '@/services/auth'; +import type { IRouteHandler } from '@/types'; + +export const login: IRouteHandler = async (req, res) => { + const { username, password }: Partial = req.body; + + if (!username || !password) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json('Malformed Request'); + } + + const { code, data, error } = await loginService({ username, password }); + + if (error || code !== StatusCodes.OK || !data) { + const sanitizedErr = error?.message ?? 
'An error occurred.'; + return res.status(code).json(sanitizedErr); + } + + return res + .status(StatusCodes.OK) + .cookie(COOKIE_NAME, data.cookie, { + httpOnly: true, + secure: false, // For HTTPS: Set true + sameSite: 'lax', + path: '/', + }) + .json(data.user); +}; + +export const logout: IRouteHandler = async (_req, res) => { + // clearCookie options must mirror the ones used in login's res.cookie + // (except expires/maxAge), otherwise browsers ignore the expiry and the + // session cookie is never actually cleared. + return res + .clearCookie(COOKIE_NAME, { + secure: false, // For HTTPS: Set true (keep in sync with login) + sameSite: 'lax', + }) + .status(StatusCodes.OK) + .json('User has been logged out.'); +}; + +export const register: IRouteHandler = async (req, res) => { + //Extract the registration data from the request body + const { email, username, password, firstName, lastName }: Partial<IRegisterPayload> = req.body; + + //Validate input + if (!username || !password || !email || !firstName || !lastName) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json('Malformed Request'); + } + + //Call the registration service + const { code, data, error } = await registerService({ + email, + username, + firstName, + lastName, + password, + }); + + //Handle errors + if (error || code !== StatusCodes.CREATED || !data) { + const sanitizedErr = error?.message ?? 'An error occurred during registration.'; + return res.status(code).json(sanitizedErr); + } + + return res.status(StatusCodes.CREATED).json({ + message: 'User registered successfully', + user: data.user, // Return user data if needed + }); +}; + +export const checkUsernameValid: IRouteHandler = async (req, res) => { + const { username }: IUsernameValidPayload = req.body; + + if (!username) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json('Malformed Request'); + } + + const { code, data, error } = await checkUsernameValidService({ username }); + + if (error || code !== StatusCodes.OK || !data) { + const sanitizedErr = error?.message ?? 
'An error occurred.'; + return res.status(code).json(sanitizedErr); + } + + return res.status(StatusCodes.OK).json(data); +}; + +export const checkEmailValid: IRouteHandler = async (req, res) => { + const { email }: IEmailValidPayload = req.body; + + if (!email) { + return res.status(StatusCodes.UNPROCESSABLE_ENTITY).json('Malformed Request'); + } + + const { code, data, error } = await checkEmailValidService({ email }); + + if (error || code !== StatusCodes.OK || !data) { + const sanitizedErr = error?.message ?? 'An error occurred.'; + return res.status(code).json(sanitizedErr); + } + + return res.status(StatusCodes.OK).json(data); +}; diff --git a/backend/user/src/controllers/questions/index.ts b/backend/user/src/controllers/questions/index.ts new file mode 100644 index 0000000000..ac94679827 --- /dev/null +++ b/backend/user/src/controllers/questions/index.ts @@ -0,0 +1,38 @@ +import { StatusCodes } from 'http-status-codes'; + +import { addAttemptedQuestionService, getAttemptedQuestionsService } from '@/services/questions'; +import type { IRouteHandler } from '@/types'; + +export const addAttemptedQuestion: IRouteHandler = async (req, res) => { + const { questionId, userIds } = req.body; // Assuming the questionId is passed in the request body + + if (!userIds || !questionId) { + return res.status(StatusCodes.BAD_REQUEST).json('User ID and Question ID are required'); + } + + const { code, data, error } = await addAttemptedQuestionService(userIds, questionId); + + if (error || code !== StatusCodes.OK || !data) { + const sanitizedErr = error?.message ?? 
'An error occurred.'; + return res.status(code).json(sanitizedErr); + } + + return res.status(StatusCodes.OK).json(data); +}; + +export const getAttemptedQuestions: IRouteHandler = async (req, res) => { + const userId = req.body.userId; + + if (!userId) { + return res.status(StatusCodes.BAD_REQUEST).json('User ID is required'); + } + + const { code, data, error } = await getAttemptedQuestionsService(userId); + + if (error || code !== StatusCodes.OK || !data) { + const sanitizedErr = error?.message ?? 'An error occurred.'; + return res.status(code).json(sanitizedErr); + } + + return res.status(StatusCodes.OK).json(data); +}; diff --git a/backend/user/src/index.ts b/backend/user/src/index.ts new file mode 100644 index 0000000000..2cb2a3e792 --- /dev/null +++ b/backend/user/src/index.ts @@ -0,0 +1,11 @@ +import { EXPRESS_PORT } from '@/config'; +import { logger } from '@/lib/utils'; +import app, { dbHealthCheck } from '@/server'; + +const port = Number.parseInt(EXPRESS_PORT ?? '8001'); + +const listenMessage = `App listening on port: ${port}`; +app.listen(port, async () => { + await dbHealthCheck(false); + logger.info(listenMessage); +}); diff --git a/backend/user/src/lib/cookies/index.ts b/backend/user/src/lib/cookies/index.ts new file mode 100644 index 0000000000..64ab9f81f1 --- /dev/null +++ b/backend/user/src/lib/cookies/index.ts @@ -0,0 +1,45 @@ +import jwt from 'jsonwebtoken'; + +import { JWT_SECRET_KEY } from '@/config'; + +export const COOKIE_NAME = 'peerprep-user-session'; + +export const generateCookie = (payload: T) => { + return jwt.sign(payload, JWT_SECRET_KEY, { + expiresIn: '30m', + }); +}; + +export const isCookieValid = (cookie: string) => { + try { + return jwt.verify(cookie, JWT_SECRET_KEY, { + ignoreExpiration: false, + }); + } catch (error) { + return false; + } +}; + +export type CookiePayload = { + id: string; +}; + +type CookieType = T & { + iat: number; + exp: number; +}; + +export const decodeCookie = (cookie: string) => { + const decoded = 
jwt.decode(cookie) as CookieType; + + return decoded; +}; + +// TODO: Insert proper cookie validity logic and middleware +export const isAuthed = (cookie: string, payload: T) => { + if (!isCookieValid(cookie) || decodeCookie(cookie) !== payload) { + return false; + } + + return true; +}; diff --git a/backend/user/src/lib/db/index.ts b/backend/user/src/lib/db/index.ts new file mode 100644 index 0000000000..5e8d35fe9b --- /dev/null +++ b/backend/user/src/lib/db/index.ts @@ -0,0 +1,12 @@ +import { drizzle } from 'drizzle-orm/postgres-js'; +import postgres from 'postgres'; + +import { dbConfig } from '@/config'; + +const queryClient = postgres({ + ...dbConfig, +}); + +export const db = drizzle(queryClient); + +export * from './schema'; diff --git a/backend/user/src/lib/db/migrate.ts b/backend/user/src/lib/db/migrate.ts new file mode 100644 index 0000000000..a012ab160a --- /dev/null +++ b/backend/user/src/lib/db/migrate.ts @@ -0,0 +1,21 @@ +import { drizzle } from 'drizzle-orm/postgres-js'; +import { migrate } from 'drizzle-orm/postgres-js/migrator'; +import postgres from 'postgres'; + +const config = { + host: process.env.EXPRESS_DB_HOST!, + port: Number.parseInt(process.env.EXPRESS_DB_PORT!), + database: process.env.POSTGRES_DB, + user: process.env.POSTGRES_USER, + password: process.env.POSTGRES_PASSWORD, +}; +const migrationConnection = postgres({ ...config, max: 1 }); + +const db = drizzle(migrationConnection); + +const main = async () => { + await migrate(db, { migrationsFolder: 'drizzle' }); + await migrationConnection.end(); +}; + +void main(); diff --git a/backend/user/src/lib/db/schema.ts b/backend/user/src/lib/db/schema.ts new file mode 100644 index 0000000000..e4743c1731 --- /dev/null +++ b/backend/user/src/lib/db/schema.ts @@ -0,0 +1,32 @@ +import { + boolean, + integer, + pgEnum, + pgTable, + smallint, + timestamp, + uuid, + varchar, +} from 'drizzle-orm/pg-core'; + +// Define the user table +export const users = pgTable('users', { + id: 
uuid('id').primaryKey().notNull().defaultRandom(), // UUID as primary key with a default random value + email: varchar('email', { length: 255 }).unique().notNull(), // Email field, unique and required + username: varchar('username', { length: 255 }).unique().notNull(), // Username field, unique and required + firstName: varchar('first_name', { length: 255 }).notNull(), // First name field, required + lastName: varchar('last_name', { length: 255 }).notNull(), // Last name field, required + password: varchar('password', { length: 255 }).notNull(), // Password field, required (hashed password) + failedAttempts: smallint('failed_attempts').default(0), // Failed counts + unlockTime: timestamp('unlock_time', { precision: 6, withTimezone: true }), // If failed counts > limit, block all attempts until this time. + attemptedQuestions: integer('attempted_questions').array(), + isAdmin: boolean('is_admin').default(false), +}); + +export const actionEnum = pgEnum('action', ['SEED']); + +export const admin = pgTable('admin', { + id: uuid('id').primaryKey().notNull().defaultRandom(), + createdAt: timestamp('created_at').defaultNow(), + action: actionEnum('action').notNull(), +}); diff --git a/backend/user/src/lib/db/seed.ts b/backend/user/src/lib/db/seed.ts new file mode 100644 index 0000000000..1595b03495 --- /dev/null +++ b/backend/user/src/lib/db/seed.ts @@ -0,0 +1,77 @@ +import { eq, InferInsertModel } from 'drizzle-orm'; + +import { generatePasswordHash } from '@/lib/passwords'; + +import { admin as adminTable, db, users as usersTable } from '.'; + +const TEST_USER_CREDENTIALS: Array> = [ + { + username: 'testuser01', + email: 'test_user_01@email.com', + firstName: 'test', + lastName: 'user01', + password: '12345678', // For local testing purposes + }, + { + username: 'testuser02', + email: 'test_user_02@email.com', + firstName: 'test', + lastName: 'user02', + password: '123456789', // For local testing purposes + }, + { + username: 'adminuser01', + email: 
'admin_user@email.com', + firstName: 'admin', + lastName: 'user01', + password: 'IamPeerprepAdmin!9', + isAdmin: true, + }, +]; + +const main = async () => { + await db.transaction(async (tx) => { + // Clear all users + try { + const seedRecords = await tx.select().from(adminTable).where(eq(adminTable.action, 'SEED')); + + if (seedRecords && seedRecords.length > 0) { + console.info( + `[Users]: Seeded already at: ${(seedRecords[seedRecords.length - 1].createdAt ?? new Date()).toLocaleString()}` + ); + return; + } + + await tx.delete(usersTable); + + for (const { password: ptPass, ...creds } of TEST_USER_CREDENTIALS) { + const password = generatePasswordHash(ptPass); + // Insert + await tx + .insert(usersTable) + .values({ + ...creds, + password, + }) + .onConflictDoNothing(); + } + + await tx.insert(adminTable).values({ action: 'SEED' }); + } catch (error) { + console.error('[Users]: An error occurred while seeding: ' + String(error)); + process.exit(1); + } + }); +}; + +void main() + .catch((err) => { + if (err !== null) { + console.error('[Users]: Error occurred during seeding: ' + String(err)); + process.exit(1); + } + }) + .then(() => { + console.log('[Users]: Seeding completed successfully'); + process.exit(0); + }); diff --git a/backend/user/src/lib/passwords/index.ts b/backend/user/src/lib/passwords/index.ts new file mode 100644 index 0000000000..5e6df1ab8f --- /dev/null +++ b/backend/user/src/lib/passwords/index.ts @@ -0,0 +1,9 @@ +import bcrypt from 'bcrypt'; + +export const getIsPasswordValid = (payload: string, actualPassword: string) => { + return bcrypt.compareSync(payload, actualPassword); +}; + +export const generatePasswordHash = (password: string) => { + return bcrypt.hashSync(password, 10); +}; diff --git a/backend/user/src/lib/ratelimit/index.ts b/backend/user/src/lib/ratelimit/index.ts new file mode 100644 index 0000000000..9e025c9c9d --- /dev/null +++ b/backend/user/src/lib/ratelimit/index.ts @@ -0,0 +1,8 @@ +import { rateLimit } from 
'express-rate-limit'; + +export const limiter = rateLimit({ + windowMs: 5 * 60 * 1000, // 5 minutes + limit: 10, // each IP can make up to 10 requests per `windowMs` (5 minutes) + standardHeaders: true, // add the `RateLimit-*` headers to the response + legacyHeaders: false, // remove the `X-RateLimit-*` headers from the response +}); diff --git a/backend/user/src/lib/utils/index.ts b/backend/user/src/lib/utils/index.ts new file mode 100644 index 0000000000..1ff09efd40 --- /dev/null +++ b/backend/user/src/lib/utils/index.ts @@ -0,0 +1 @@ +export * from './logger'; diff --git a/backend/user/src/lib/utils/logger.ts b/backend/user/src/lib/utils/logger.ts new file mode 100644 index 0000000000..e41655d003 --- /dev/null +++ b/backend/user/src/lib/utils/logger.ts @@ -0,0 +1,3 @@ +import pinoLogger from 'pino'; + +export const logger = pinoLogger(); diff --git a/backend/user/src/routes/auth-check.ts b/backend/user/src/routes/auth-check.ts new file mode 100644 index 0000000000..f69670f9ec --- /dev/null +++ b/backend/user/src/routes/auth-check.ts @@ -0,0 +1,9 @@ +import express from 'express'; + +import { checkIsAuthed } from '@/controllers/auth-check'; + +const router = express.Router(); + +router.get('/is-authed', checkIsAuthed); + +export default router; diff --git a/backend/user/src/routes/auth.ts b/backend/user/src/routes/auth.ts new file mode 100644 index 0000000000..b3212b6f96 --- /dev/null +++ b/backend/user/src/routes/auth.ts @@ -0,0 +1,16 @@ +import express from 'express'; + +import { checkEmailValid, checkUsernameValid, login, logout, register } from '@/controllers/auth'; +import { limiter } from '@/lib/ratelimit'; + +const router = express.Router(); + +// Register the rate limiter BEFORE the routes: Express runs middleware in registration order, so a limiter added after the routes would never execute for them. +router.use(limiter); + +router.post('/login', login); +router.post('/logout', logout); +router.post('/register', register); +router.post('/username-valid', checkUsernameValid); +router.post('/email-valid', checkEmailValid); + +export default router; diff --git a/backend/user/src/routes/user.ts 
b/backend/user/src/routes/user.ts new file mode 100644 index 0000000000..74807563e6 --- /dev/null +++ b/backend/user/src/routes/user.ts @@ -0,0 +1,10 @@ +import express from 'express'; + +import { addAttemptedQuestion, getAttemptedQuestions } from '@/controllers/questions'; + +const router = express.Router(); + +router.post('/attempted-question/get', getAttemptedQuestions); +router.post('/attempted-question/add', addAttemptedQuestion); + +export default router; diff --git a/backend/user/src/server.ts b/backend/user/src/server.ts new file mode 100644 index 0000000000..e70dbd5d75 --- /dev/null +++ b/backend/user/src/server.ts @@ -0,0 +1,72 @@ +import { exit } from 'process'; + +import cookieParser from 'cookie-parser'; +import cors from 'cors'; +import { sql } from 'drizzle-orm'; +import express, { json } from 'express'; +import helmet from 'helmet'; +import { StatusCodes } from 'http-status-codes'; +import pino from 'pino-http'; + +import { dbConfig, LOAD_TEST_POD, UI_HOST } from '@/config'; +import { db } from '@/lib/db'; +import { logger } from '@/lib/utils'; +import authRoutes from '@/routes/auth'; +import authCheckRoutes from '@/routes/auth-check'; +import userRoutes from '@/routes/user'; + +const app = express(); +app.use( + pino({ + serializers: { + req: ({ id, method, url, headers: { host, referer }, query, params }) => ({ + id, + method, + url, + headers: { host, referer }, + query, + params, + }), + res: ({ statusCode }) => ({ statusCode }), + }, + }) +); +app.use(json()); +app.use(helmet()); +app.use(cookieParser()); +app.use( + cors({ + origin: [UI_HOST, LOAD_TEST_POD], + credentials: true, + }) +); + +app.use('/auth', authRoutes); +app.use('/auth-check', authCheckRoutes); +app.use('/user', userRoutes); + +// Health Check for Docker +app.get('/health', (_req, res) => res.status(StatusCodes.OK).send('OK')); + +export const dbHealthCheck = async (exitApp: boolean = true) => { + try { + await db.execute(sql`SELECT 1`); + logger.info('Connected to DB'); + } 
catch (error) { + const { message } = error as Error; + logger.error('Cannot connect to DB: ' + message); + logger.error(`DB Config: ${JSON.stringify(dbConfig)}`); + + if (exitApp) { + exit(1); + } + } +}; + +// Ensure DB service is up before running. +app.get('/test-db', async (_req, res) => { + await dbHealthCheck(false); + res.json({ message: 'OK ' }); +}); + +export default app; diff --git a/backend/user/src/services/auth/index.ts b/backend/user/src/services/auth/index.ts new file mode 100644 index 0000000000..f372300b96 --- /dev/null +++ b/backend/user/src/services/auth/index.ts @@ -0,0 +1,4 @@ +export * from './login'; +export * from './register'; +export * from './types'; +export * from './validity_check'; diff --git a/backend/user/src/services/auth/login.ts b/backend/user/src/services/auth/login.ts new file mode 100644 index 0000000000..34180a7006 --- /dev/null +++ b/backend/user/src/services/auth/login.ts @@ -0,0 +1,92 @@ +import { eq, getTableColumns, sql } from 'drizzle-orm'; +import { StatusCodes } from 'http-status-codes'; + +import { CookiePayload, generateCookie } from '@/lib/cookies'; +import { db, users } from '@/lib/db'; +import { getIsPasswordValid } from '@/lib/passwords'; + +import type { ILoginPayload, ILoginResponse } from './types'; + +const _FAILED_ATTEMPTS_ALLOWED = 3; + +const _getSchema = () => { + const { id, username, password, email, failedAttempts, unlockTime } = getTableColumns(users); + return { + id, + username, + password, + email, + failedAttempts, + unlockTime, + }; +}; + +export const loginService = async (payload: ILoginPayload): Promise => { + const rows = await db + .select(_getSchema()) + .from(users) + .where(eq(users.username, payload.username)) + .limit(1); + + // 1. Cannot find + if (rows.length === 0) { + return { + code: StatusCodes.NOT_FOUND, + error: { + message: 'Not Found', + }, + }; + } + + const { unlockTime, password, failedAttempts, ...user } = rows[0]; + + // 2. 
Locked out + if (unlockTime !== null) { + const currentTime = new Date(); + + if (unlockTime > currentTime) { + return { + code: StatusCodes.CONFLICT, + error: { + message: 'Too many failed attempts - try again later', + }, + }; + } + } + + // 3. Wrong Password + const isPasswordValid = getIsPasswordValid(payload.password, password); + + if (!isPasswordValid) { + const newFailedAttempts = (failedAttempts ?? 0) + 1; + const updateValues = { + failedAttempts: newFailedAttempts, + unlockTime: + newFailedAttempts >= _FAILED_ATTEMPTS_ALLOWED ? sql`NOW() + INTERVAL '1 hour'` : undefined, + }; + await db.update(users).set(updateValues).where(eq(users.username, payload.username)); + return { + code: StatusCodes.UNAUTHORIZED, + error: { + message: 'Incorrect Password', + }, + }; + } + + // 4. Correct Password + if ((failedAttempts !== null && failedAttempts > 0) || unlockTime !== null) { + await db.update(users).set({ + failedAttempts: null, + unlockTime: null, + }); + } + + const jwtToken = generateCookie({ id: user.id }); + return { + code: StatusCodes.OK, + data: { + cookie: jwtToken, + user, + }, + }; +}; diff --git a/backend/user/src/services/auth/register.ts b/backend/user/src/services/auth/register.ts new file mode 100644 index 0000000000..73dc8aab87 --- /dev/null +++ b/backend/user/src/services/auth/register.ts @@ -0,0 +1,61 @@ +import { eq, getTableColumns, or } from 'drizzle-orm'; +import { StatusCodes } from 'http-status-codes'; + +import { db, users } from '@/lib/db'; +import { generatePasswordHash } from '@/lib/passwords'; + +import type { IRegisterPayload } from './types'; + +const _getSchema = () => { + const { id, email, username, firstName, lastName } = getTableColumns(users); + return { + id, + email, + username, + firstName, + lastName, + }; +}; + +export const registerService = async (payload: IRegisterPayload) => { + const { email, username, firstName, lastName, password } = payload; + + //check if user already exists (by username or email) + const 
existingUsers = await db + .select(_getSchema()) + .from(users) + .where(or(eq(users.username, username), eq(users.email, email))) + .limit(1); + + if (existingUsers.length > 0) { + return { + code: StatusCodes.CONFLICT, + error: { + message: 'User with this username or email already exists', + }, + }; + } + + //hash the password + const hashedPassword = generatePasswordHash(password); + + //insert new user into the database + const [newUser] = await db + .insert(users) + .values({ + email, + username, + firstName, + lastName, + password: hashedPassword, //store the hashed password + }) + .returning(_getSchema()); + + // return success response with the JWT token + return { + code: StatusCodes.CREATED, + data: { + user: newUser, + }, + }; +}; diff --git a/backend/user/src/services/auth/types.ts b/backend/user/src/services/auth/types.ts new file mode 100644 index 0000000000..26c15c7f34 --- /dev/null +++ b/backend/user/src/services/auth/types.ts @@ -0,0 +1,70 @@ +import type { IServiceResponse } from '@/types'; + +//============================================================================= +// /auth/login +//============================================================================= + +export type ILoginPayload = { + username: string; + password: string; +}; + +export type ILoginResponse = IServiceResponse<{ + // To be extracted by controller and set on the HTTP response + cookie: string; + // Payload for controller + user: { + id: string; + username: string; + email: string; + }; +}>; + +//============================================================================= +// /auth/logout +//============================================================================= +// eslint-disable-next-line @typescript-eslint/no-empty-object-type +export type ILogoutPayload = {}; + +//============================================================================= +// /auth/register (TBC) +//============================================================================= +export type 
IRegisterPayload = { + username: string; + firstName: string; + lastName: string; + password: string; + email: string; +}; + +export type IRegisterResponse = IServiceResponse<{ + user: { + username: string; + email: string; + firstName: string; + lastName: string; + }; +}>; +// export type IRegisterResponse = Awaited> + +//============================================================================= +// /auth/username-valid +//============================================================================= +export type IUsernameValidPayload = { + username: string; +}; + +export type IUsernameValidResponse = IServiceResponse<{ + valid: boolean; +}>; + +//============================================================================= +// /auth/email-valid +//============================================================================= +export type IEmailValidPayload = { + email: string; +}; + +export type IEmailValidResponse = IServiceResponse<{ + valid: boolean; +}>; diff --git a/backend/user/src/services/auth/validity_check.ts b/backend/user/src/services/auth/validity_check.ts new file mode 100644 index 0000000000..b922927ecd --- /dev/null +++ b/backend/user/src/services/auth/validity_check.ts @@ -0,0 +1,71 @@ +import { eq, sql } from 'drizzle-orm'; +import { StatusCodes } from 'http-status-codes'; + +import { db, users } from '@/lib/db'; + +import { + IEmailValidPayload, + IEmailValidResponse, + IUsernameValidPayload, + IUsernameValidResponse, +} from './types'; + +export const checkUsernameValidService = async ( + payload: IUsernameValidPayload +): Promise => { + const { username } = payload; + + try { + const result = await db + .select({ count: sql`count(*)` }) + .from(users) + .where(eq(users.username, username)); + + const isValid = result[0].count === 0; + + return { + code: StatusCodes.OK, + data: { + valid: isValid, + }, + }; + } catch (error) { + console.error('Error checking username availability:', error); + return { + code: 
StatusCodes.INTERNAL_SERVER_ERROR, + error: { + message: 'An error occurred while checking username availability', + }, + }; + } +}; + +export const checkEmailValidService = async ( + payload: IEmailValidPayload +): Promise => { + const { email } = payload; + + try { + const result = await db + .select({ count: sql`count(*)` }) + .from(users) + .where(eq(users.email, email)); + + const isValid = result[0].count === 0; + + return { + code: StatusCodes.OK, + data: { + valid: isValid, + }, + }; + } catch (error) { + console.error('Error checking email availability:', error); + return { + code: StatusCodes.INTERNAL_SERVER_ERROR, + error: { + message: 'An error occurred while checking email availability', + }, + }; + } +}; diff --git a/backend/user/src/services/questions/index.ts b/backend/user/src/services/questions/index.ts new file mode 100644 index 0000000000..4086d23d06 --- /dev/null +++ b/backend/user/src/services/questions/index.ts @@ -0,0 +1,96 @@ +import { eq, sql } from 'drizzle-orm'; +import { StatusCodes } from 'http-status-codes'; + +import { db } from '@/lib/db'; +import { users } from '@/lib/db/schema'; + +interface IGetAttemptedQuestionsResponse { + code: StatusCodes; + data?: Array; + error?: Error; +} + +interface IAddAttemptedQuestionResponse { + code: StatusCodes; + data?: { message: string }; + error?: Error; +} + +export const addAttemptedQuestionService = async ( + userIds: Array, + questionId: number +): Promise => { + try { + // Check if the users exist + const userRecords = await db + .select({ id: users.id, attemptedQuestions: users.attemptedQuestions }) + .from(users) + .where(sql`${users.id} = ANY(${userIds})`); + + if (userRecords.length === 0) { + return { + code: StatusCodes.NOT_FOUND, + error: new Error('No users found'), + }; + } + + // Update attemptedQuestions for each user + const updatePromises = userRecords.map((user) => { + const attemptedQuestions = user.attemptedQuestions || []; + + if (!attemptedQuestions.includes(questionId)) 
{ + return db + .update(users) + .set({ + attemptedQuestions: sql`array_append(${users.attemptedQuestions}, ${questionId})`, + }) + .where(eq(users.id, user.id)); + } + + return Promise.resolve(); // No update needed if question already attempted + }); + + await Promise.all(updatePromises); + + return { + code: StatusCodes.OK, + data: { message: 'Question added to attempted questions for applicable users' }, + }; + } catch (error) { + console.error('Error adding attempted question:', error); + return { + code: StatusCodes.INTERNAL_SERVER_ERROR, + error: new Error('An error occurred while adding the attempted question'), + }; + } +}; + +export const getAttemptedQuestionsService = async ( + userId: string +): Promise => { + try { + const result = await db + .select({ attemptedQuestions: users.attemptedQuestions }) + .from(users) + .where(eq(users.id, userId)) + .limit(1); + + if (result.length === 0) { + return { + code: StatusCodes.NOT_FOUND, + error: new Error('User not found'), + }; + } + + return { + code: StatusCodes.OK, + data: result[0].attemptedQuestions || [], + }; + } catch (error) { + console.error('Error fetching attempted questions:', error); + return { + code: StatusCodes.INTERNAL_SERVER_ERROR, + error: new Error('An error occurred while fetching attempted questions'), + }; + } +}; diff --git a/backend/user/src/types/index.ts b/backend/user/src/types/index.ts new file mode 100644 index 0000000000..6738aa624a --- /dev/null +++ b/backend/user/src/types/index.ts @@ -0,0 +1 @@ +export * from './utility'; diff --git a/backend/user/src/types/utility.ts b/backend/user/src/types/utility.ts new file mode 100644 index 0000000000..09e35c42c3 --- /dev/null +++ b/backend/user/src/types/utility.ts @@ -0,0 +1,12 @@ +import type { Request, Response } from 'express'; +import type { StatusCodes } from 'http-status-codes'; + +export type IServiceResponse = { + code: StatusCodes; + error?: { + message: string; + }; + data?: T; +}; + +export type IRouteHandler = (req: 
Request, res: Response) => Promise>; diff --git a/backend/user/tsconfig.json b/backend/user/tsconfig.json new file mode 100644 index 0000000000..c18b7e3c96 --- /dev/null +++ b/backend/user/tsconfig.json @@ -0,0 +1,110 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + /* Language and Environment */ + "target": "ES2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. 
*/ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + /* Modules */ + "module": "commonjs", /* Specify what module code is generated. */ + "rootDir": "./src", /* Specify the root folder within your source files. */ + // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ + "baseUrl": ".", /* Specify the base directory to resolve non-relative module names. */ + "paths": { + "@/*": ["./src/*"] + }, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. 
*/ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./dist", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. 
*/ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. 
This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. 
*/ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. */ + }, + "exclude": [ + "drizzle.*.*ts" + ], + "ts-node": { + "swc": true, + "require": ["tsconfig-paths/register"] + } +} \ No newline at end of file diff --git a/docker-compose.local.yaml b/docker-compose.local.yaml new file mode 100644 index 0000000000..6e5e6d6fcd --- /dev/null +++ b/docker-compose.local.yaml @@ -0,0 +1,90 @@ +# Command: docker-compose --env-file .env.local -f docker-compose.local.yaml up -d + +services: + user-db: + hostname: 'user-db' + image: postgres:16.4 + container_name: 'user-db' + build: + context: ./backend/user/src/lib/db + env_file: + - ./backend/user/.env.local + volumes: + - 'user-db-docker:${USER_PGDATA}' + ports: + - '${USER_EXPRESS_DB_PORT}:5432' + restart: unless-stopped + + question-db: + hostname: 'question-db' + image: postgres:16.4 + container_name: 'question-db' + build: + context: ./backend/question/src/lib/db + env_file: + - ./backend/question/.env.local + volumes: + - 'question-db-docker:${QUESTION_PGDATA}' + # - ./init.sql:/docker-entrypoint-initdb.d/init.sql + ports: + - '${QUESTION_EXPRESS_DB_PORT}:5432' + restart: unless-stopped + + collab-db: + hostname: 'collab-db' + image: postgres:16.4 + container_name: 'collab-db' + env_file: + - ./backend/collaboration/.env.local + volumes: + - 'collab-db-docker:${COLLAB_PGDATA}' + # - ./init.sql:/docker-entrypoint-initdb.d/init.sql + ports: + - '${COLLAB_EXPRESS_DB_PORT}:5432' + restart: unless-stopped + + match-db: + hostname: 'match-db' + image: redis/redis-stack + container_name: 'match-db' + 
env_file: + - ./backend/matching/.env.local + volumes: + - 'match-db-docker:/data' + ports: + - '${MATCHING_DB_HOST_MGMT_PORT}:8001' + - '${MATCHING_DB_HOST_PORT}:6379' + restart: unless-stopped + + chat-db: + hostname: 'chat-db' + image: postgres:16.4 + container_name: 'chat-db' + env_file: + - ./backend/chat/.env.local + volumes: + - 'chat-db-docker:${CHAT_PGDATA}' + # - ./init.sql:/docker-entrypoint-initdb.d/init.sql + ports: + - '${CHAT_EXPRESS_DB_PORT}:5432' + restart: unless-stopped + + # match-db-ui: + # hostname: "match-db-ui" + # image: redis/redisinsight + # container_name: "match-db-ui" + # ports: + # - "${MATCHING_DB_HOST_MGMT_PORT}:5540" + # restart: unless-stopped + +volumes: + user-db-docker: + external: true + question-db-docker: + external: true + collab-db-docker: + external: true + match-db-docker: + external: true + chat-db-docker: + external: true diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000000..ce4e91c59c --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,348 @@ +# Command: docker-compose --env-file .env.local up -d + +services: + # Databases + user-db: + hostname: 'user-db' + image: 'postgres:16.4' + container_name: 'user-db' + build: + context: ./backend/user/src/lib/db + env_file: + - ./backend/user/.env.compose + volumes: + - 'user-db-docker:${USER_PGDATA}' + restart: unless-stopped + networks: + - user-db-network + healthcheck: + test: ['CMD-SHELL', 'pg_isready -U peerprep-user-express -d user'] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s + + question-db: + hostname: 'question-db' + image: postgres:16.4 + container_name: 'question-db' + build: + context: ./backend/question/src/lib/db + env_file: + - ./backend/question/.env.compose + volumes: + - 'question-db-docker:${QUESTION_PGDATA}' + restart: unless-stopped + networks: + - question-db-network + healthcheck: + test: ['CMD-SHELL', 'pg_isready -U peerprep-qn-express -d question'] + interval: 10s + retries: 5 + start_period: 
30s + timeout: 10s + + collab-db: + hostname: 'collab-db' + image: postgres:16.4 + container_name: 'collab-db' + build: + context: ./backend/collaboration/src/lib/db + env_file: + - ./backend/collaboration/.env.local + volumes: + - 'collab-db-docker:${COLLAB_PGDATA}' + # - ./init.sql:/docker-entrypoint-initdb.d/init.sql + restart: unless-stopped + networks: + - collab-db-network + healthcheck: + test: ['CMD-SHELL', 'pg_isready -U peerprep-collab-express -d collab'] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s + + chat-db: + hostname: 'chat-db' + image: postgres:16.4 + container_name: 'chat-db' + build: + context: ./backend/chat/src/lib/db + env_file: + - ./backend/chat/.env.compose + volumes: + - 'chat-db-docker:${CHAT_PGDATA}' + # - ./init.sql:/docker-entrypoint-initdb.d/init.sql + restart: unless-stopped + networks: + - chat-db-network + healthcheck: + test: ['CMD-SHELL', 'pg_isready -U peerprep-chat-express -d chat'] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s + + match-db: + hostname: 'match-db' + # To enable Admin UI for cluster ↙️ + # image: redis/redis-stack + # ports: + # - 8001:8001 + image: redis/redis-stack-server + container_name: 'match-db' + env_file: + - ./backend/matching/.env.compose + volumes: + - 'match-db-docker:/data' + restart: unless-stopped + networks: + - match-db-network + + # Services + user-service: + image: 'user-express' + container_name: '${USER_SERVICE_NAME}' + build: + context: ./backend/user + dockerfile: express.Dockerfile + target: production + args: + # For building with the correct env vars + - port=${USER_EXPRESS_PORT} + env_file: + - ./backend/user/.env.compose + environment: + # Docker Compose Specific for Service Discovery + - EXPRESS_DB_HOST=user-db + - EXPRESS_DB_PORT=5432 + - PEERPREP_UI_HOST=http://${FRONTEND_SERVICE_NAME}:${FRONTEND_PORT} + depends_on: + user-db: + condition: service_healthy + restart: true + networks: + - user-db-network + - user-api-network + healthcheck: + 
test: wget --no-verbose --tries=1 --spider http://localhost:${USER_EXPRESS_PORT}/health || exit 1 + interval: 30s + timeout: 10s + retries: 5 + start_period: 5s + + question-service: + image: 'question-express' + container_name: '${QUESTION_SERVICE_NAME}' + build: + context: ./backend/question + dockerfile: express.Dockerfile + target: production + args: + # For building with the correct env vars + - port=${QUESTION_EXPRESS_PORT} + env_file: + - ./backend/question/.env.compose + environment: + # Docker Compose Specific for Service Discovery + - EXPRESS_DB_HOST=question-db + - EXPRESS_DB_PORT=5432 + - PEERPREP_UI_HOST=http://${FRONTEND_SERVICE_NAME}:${FRONTEND_PORT} + depends_on: + question-db: + condition: service_healthy + restart: true + networks: + - question-db-network + - question-api-network + healthcheck: + test: wget --no-verbose --tries=1 --spider http://localhost:${QUESTION_EXPRESS_PORT}/health || exit 1 + interval: 30s + timeout: 10s + retries: 5 + start_period: 5s + + collab-service: + image: 'collab-express' + container_name: '${COLLAB_SERVICE_NAME}' + build: + context: ./backend/collaboration + dockerfile: express.Dockerfile + target: production + args: + # For building with the correct env vars + - port=${COLLAB_EXPRESS_PORT} + env_file: + - ./backend/collaboration/.env.compose + environment: + # Docker Compose Specific for Service Discovery + - EXPRESS_DB_HOST=collab-db + - EXPRESS_DB_PORT=5432 + - PEERPREP_UI_HOST=http://${FRONTEND_SERVICE_NAME}:${FRONTEND_PORT} + depends_on: + collab-db: + condition: service_healthy + restart: true + networks: + - collab-db-network + - collab-api-network + healthcheck: + test: wget --no-verbose --tries=1 --spider http://localhost:${COLLAB_EXPRESS_PORT}/health || exit 1 + interval: 30s + timeout: 10s + retries: 5 + start_period: 5s + + matching-service: + image: 'match-express' + container_name: '${MATCHING_SERVICE_NAME}' + build: + context: ./backend/matching + dockerfile: express.Dockerfile + target: production + 
args: + # For building with the correct env vars + - port=${MATCHING_EXPRESS_PORT} + env_file: + - ./backend/matching/.env.compose + environment: + # Docker Compose Specific for Service Discovery + - MATCHING_DB_HOSTNAME=match-db + - MATCHING_DB_PORT=6379 + - PEERPREP_UI_HOST=http://${FRONTEND_SERVICE_NAME}:${FRONTEND_PORT} + - PEERPREP_USER_HOST=http://${USER_SERVICE_NAME}:${USER_EXPRESS_PORT} + - PEERPREP_QUESTION_HOST=http://${QUESTION_SERVICE_NAME}:${QUESTION_EXPRESS_PORT} + - PEERPREP_COLLAB_HOST=http://${COLLAB_SERVICE_NAME}:${COLLAB_EXPRESS_PORT} + depends_on: + - match-db + - user-service + - question-service + networks: + - match-db-network + - match-api-network + - user-api-network + - question-api-network + - collab-api-network + healthcheck: + test: wget --no-verbose --tries=1 --spider http://localhost:${MATCHING_EXPRESS_PORT}/health || exit 1 + interval: 30s + timeout: 10s + retries: 5 + start_period: 5s + + chat-service: + image: 'chat-express' + container_name: '${CHAT_SERVICE_NAME}' + build: + context: ./backend/chat + dockerfile: express.Dockerfile + target: production + args: + # For building with the correct env vars + - port=${CHAT_EXPRESS_PORT} + env_file: + - ./backend/chat/.env.compose + environment: + # Docker Compose Specific for Service Discovery + - EXPRESS_DB_HOST=chat-db + - EXPRESS_DB_PORT=5432 + - PEERPREP_UI_HOST=http://${FRONTEND_SERVICE_NAME}:${FRONTEND_PORT} + depends_on: + chat-db: + condition: service_healthy + restart: true + networks: + - chat-db-network + - chat-api-network + healthcheck: + test: wget --no-verbose --tries=1 --spider http://localhost:${CHAT_EXPRESS_PORT}/health || exit 1 + interval: 30s + timeout: 10s + retries: 5 + start_period: 5s + + # Frontend + frontend: + image: 'frontend' + container_name: 'frontend' + build: + context: ./frontend + dockerfile: ./frontend.Dockerfile + target: production + args: + - FRONTEND_PORT=${FRONTEND_PORT} + ports: + - '3000:${FRONTEND_PORT}' + env_file: + - 
./frontend/.env.compose + environment: + - VITE_USER_SERVICE=http://${USER_SERVICE_NAME}:${USER_EXPRESS_PORT} + - VITE_QUESTION_SERVICE=http://${QUESTION_SERVICE_NAME}:${QUESTION_EXPRESS_PORT} + - VITE_COLLAB_SERVICE=http://${COLLAB_SERVICE_NAME}:${COLLAB_EXPRESS_PORT} + - VITE_COLLAB_WS=ws://${COLLAB_SERVICE_NAME}:${COLLAB_EXPRESS_PORT} + - VITE_MATCHING_SERVICE=http://${MATCHING_SERVICE_NAME}:${MATCHING_EXPRESS_PORT} + - VITE_CHAT_SERVICE=http://${CHAT_SERVICE_NAME}:${CHAT_EXPRESS_PORT} + - FRONTEND_PORT=${FRONTEND_PORT} + depends_on: + user-service: + condition: service_healthy + restart: true + question-service: + condition: service_healthy + restart: true + matching-service: + condition: service_healthy + restart: true + collab-service: + condition: service_healthy + restart: true + chat-service: + condition: service_healthy + restart: true + networks: + - user-api-network + - question-api-network + - match-api-network + - collab-api-network + - chat-api-network + +volumes: + # Persistent Volumes for Databases + user-db-docker: + external: true + question-db-docker: + external: true + # Persistent Room Server + collab-db-docker: + external: true + # Redis Match server + match-db-docker: + external: true + chat-db-docker: + external: true + +networks: + # Isolated API Server Networks + user-db-network: + driver: bridge + question-db-network: + driver: bridge + collab-db-network: + driver: bridge + match-db-network: + driver: bridge + chat-db-network: + driver: bridge + + # View-Controller Networks + user-api-network: + driver: bridge + question-api-network: + driver: bridge + collab-api-network: + driver: bridge + match-api-network: + driver: bridge + chat-api-network: + driver: bridge diff --git a/docs/architecture-dark.png b/docs/architecture-dark.png new file mode 100644 index 0000000000..66cd3117e6 Binary files /dev/null and b/docs/architecture-dark.png differ diff --git a/docs/architecture.png b/docs/architecture.png new file mode 100644 index 
0000000000..d1553f08c4 Binary files /dev/null and b/docs/architecture.png differ diff --git a/frontend/.dockerignore b/frontend/.dockerignore new file mode 100644 index 0000000000..b68ba5e260 --- /dev/null +++ b/frontend/.dockerignore @@ -0,0 +1,4 @@ +node_modules +dist +frontend.*Dockerfile +.dockerignore diff --git a/frontend/.env.compose b/frontend/.env.compose new file mode 100644 index 0000000000..c7392a2898 --- /dev/null +++ b/frontend/.env.compose @@ -0,0 +1 @@ +FRONTEND_ENV=compose \ No newline at end of file diff --git a/frontend/.env.docker b/frontend/.env.docker new file mode 100644 index 0000000000..37b66d141b --- /dev/null +++ b/frontend/.env.docker @@ -0,0 +1,8 @@ +FRONTEND_ENV=local + +VITE_USER_SERVICE=http://host.docker.internal:9001 +VITE_QUESTION_SERVICE=http://host.docker.internal:9002 +VITE_COLLAB_SERVICE=http://host.docker.internal:9003 +VITE_MATCHING_SERVICE=http://host.docker.internal:9004 +VITE_CHAT_SERVICE=http://host.docker.internal:9005 +FRONTEND_PORT=3000 diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000000..0d5a4fff3e --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,26 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +build +*.local +*.tsbuildinfo + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000000..b23c312879 --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,71 @@ +# Frontend + +## Running with Docker (Standalone) + +1. Enter your OPEN AI Api Key in the .env.docker file. +2. Run this command to build: + ```sh + docker build \ + --build-arg FRONTEND_PORT=3000 \ + -t frontend-app -f frontend.Dockerfile . + ``` +3. Run this command, from the root folder: + + ```sh + make db-up + ``` + +4. 
Run the necessary migrate and seed commands, if you haven't yet. + +5. Run this command to expose the container: + ```sh + docker run -p 3000:3000 --env-file ./.env.docker frontend-app + ``` + +## Running with Docker-Compose (Main config) + +Edit the variables in the `.env.compose` file and run `make up` from the root folder. + +Any startup instructions will be run from `entrypoint.sh` instead. + +## Expanding the ESLint configuration + +If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules: + +- Configure the top-level `parserOptions` property like this: + +```js +export default tseslint.config({ + languageOptions: { + // other options... + parserOptions: { + project: ['./tsconfig.node.json', './tsconfig.app.json'], + tsconfigRootDir: import.meta.dirname, + }, + }, +}); +``` + +- Replace `tseslint.configs.recommended` with `tseslint.configs.recommendedTypeChecked` or `tseslint.configs.strictTypeChecked` +- Optionally add `...tseslint.configs.stylisticTypeChecked` +- Install [eslint-plugin-react](https://github.com/jsx-eslint/eslint-plugin-react) and update the config: + +```js +// eslint.config.js +import react from 'eslint-plugin-react'; + +export default tseslint.config({ + // Set the react version + settings: { react: { version: '18.3' } }, + plugins: { + // Add the react plugin + react, + }, + rules: { + // other rules... 
+ // Enable its recommended rules + ...react.configs.recommended.rules, + ...react.configs['jsx-runtime'].rules, + }, +}); +``` diff --git a/frontend/components.json b/frontend/components.json new file mode 100644 index 0000000000..5bfb23a069 --- /dev/null +++ b/frontend/components.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "new-york", + "rsc": false, + "tsx": true, + "tailwind": { + "config": "tailwind.config.js", + "css": "src/styles/globals.css", + "baseColor": "neutral", + "cssVariables": true, + "prefix": "" + }, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + } +} \ No newline at end of file diff --git a/frontend/entrypoint.sh b/frontend/entrypoint.sh new file mode 100644 index 0000000000..28cb6185ad --- /dev/null +++ b/frontend/entrypoint.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +envsubst '${FRONTEND_PORT} ${VITE_USER_SERVICE} ${VITE_QUESTION_SERVICE} ${VITE_COLLAB_SERVICE} ${VITE_COLLAB_WS} ${VITE_MATCHING_SERVICE} ${VITE_CHAT_SERVICE}' < /etc/nginx/nginx.conf.template > /etc/nginx/conf.d/default.conf + +nginx -g 'daemon off;' \ No newline at end of file diff --git a/frontend/eslint.config.js b/frontend/eslint.config.js new file mode 100644 index 0000000000..092408a9f0 --- /dev/null +++ b/frontend/eslint.config.js @@ -0,0 +1,28 @@ +import js from '@eslint/js' +import globals from 'globals' +import reactHooks from 'eslint-plugin-react-hooks' +import reactRefresh from 'eslint-plugin-react-refresh' +import tseslint from 'typescript-eslint' + +export default tseslint.config( + { ignores: ['dist'] }, + { + extends: [js.configs.recommended, ...tseslint.configs.recommended], + files: ['**/*.{ts,tsx}'], + languageOptions: { + ecmaVersion: 2020, + globals: globals.browser, + }, + plugins: { + 'react-hooks': reactHooks, + 'react-refresh': reactRefresh, + }, + rules: { + ...reactHooks.configs.recommended.rules, + 
'react-refresh/only-export-components': [ + 'warn', + { allowConstantExport: true }, + ], + }, + }, +) diff --git a/frontend/frontend.Dockerfile b/frontend/frontend.Dockerfile new file mode 100644 index 0000000000..c5a6748c6e --- /dev/null +++ b/frontend/frontend.Dockerfile @@ -0,0 +1,21 @@ +FROM node:lts-alpine AS build + +WORKDIR /app +COPY ./package*.json ./ +RUN npm install +COPY ./ ./ + +RUN npm run build + +FROM nginx:stable-alpine AS production + +COPY --from=build /app/build /usr/share/nginx/html + +COPY ./nginx.conf.template /etc/nginx/nginx.conf.template +COPY entrypoint.sh /usr/local/bin/ +RUN chmod +x /usr/local/bin/entrypoint.sh + +ARG FRONTEND_PORT +EXPOSE ${FRONTEND_PORT} + +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] \ No newline at end of file diff --git a/frontend/frontend.dev.Dockerfile b/frontend/frontend.dev.Dockerfile new file mode 100644 index 0000000000..e1483154c6 --- /dev/null +++ b/frontend/frontend.dev.Dockerfile @@ -0,0 +1,11 @@ +FROM node:20-alpine + +WORKDIR /app/ +COPY ./frontend/package*.json ./ +RUN npm install +COPY ./frontend/ ./ + +ARG port +EXPOSE ${port} + +CMD ["npm", "run", "dev", "--", "--host"] diff --git a/frontend/index.html b/frontend/index.html new file mode 100644 index 0000000000..da530b6a56 --- /dev/null +++ b/frontend/index.html @@ -0,0 +1,13 @@ + + + + + + + PeerPrep + + +
+ + + diff --git a/frontend/nginx.conf.template b/frontend/nginx.conf.template new file mode 100644 index 0000000000..167d6def38 --- /dev/null +++ b/frontend/nginx.conf.template @@ -0,0 +1,85 @@ +map $http_upgrade $connection_upgrade { +default upgrade; +'' close; +} + +server { + listen ${FRONTEND_PORT}; + location / { + root /usr/share/nginx/html; + index index.html; + try_files $uri $uri/ /index.html; + } + + location /user-service/ { + rewrite ^/user-service(/.*)$ $1 break; + proxy_pass ${VITE_USER_SERVICE}; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /question-service/ { + rewrite ^/question-service(/.*)$ $1 break; + proxy_pass ${VITE_QUESTION_SERVICE}; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /collaboration-service/ { + rewrite ^/collaboration-service(/.*)$ $1 break; + proxy_pass ${VITE_COLLAB_SERVICE}; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /matching-service/ { + rewrite ^/matching-service(/.*)$ $1 break; + proxy_pass ${VITE_MATCHING_SERVICE}; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /collab-ws { + proxy_pass ${VITE_COLLAB_SERVICE}; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + 
+ location /matching-socket/ { + proxy_pass ${VITE_MATCHING_SERVICE}; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /chat-socket/ { + proxy_pass ${VITE_CHAT_SERVICE}; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + access_log /var/log/nginx/access.log; + error_log /var/log/nginx/error.log; +} \ No newline at end of file diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000000..3fd0f5add8 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,87 @@ +{ + "name": "frontend", + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "env-cmd -f .env.local vite", + "build": "tsc -b && vite build", + "lint": "eslint .", + "preview": "vite preview --port 5173 --host 0.0.0.0" + }, + "dependencies": { + "@codemirror/view": "^6.34.1", + "@hookform/resolvers": "^3.9.0", + "@radix-ui/react-alert-dialog": "^1.1.1", + "@radix-ui/react-checkbox": "^1.1.1", + "@radix-ui/react-dialog": "^1.1.2", + "@radix-ui/react-dropdown-menu": "^2.1.1", + "@radix-ui/react-icons": "^1.3.0", + "@radix-ui/react-label": "^2.1.0", + "@radix-ui/react-navigation-menu": "^1.2.0", + "@radix-ui/react-popover": "^1.1.2", + "@radix-ui/react-scroll-area": "^1.1.0", + "@radix-ui/react-select": "^2.1.1", + "@radix-ui/react-separator": "^1.1.0", + "@radix-ui/react-slot": "^1.1.0", + "@radix-ui/react-tabs": "^1.1.0", + "@radix-ui/react-tooltip": "^1.1.3", + "@radix-ui/react-visually-hidden": "^1.1.0", + "@replit/codemirror-vscode-keymap": 
"^6.0.2", + "@tanstack/react-query": "^5.56.2", + "@tanstack/react-query-devtools": "^5.56.2", + "@tanstack/react-table": "^8.20.5", + "@uidotdev/usehooks": "^2.4.1", + "@uiw/codemirror-extensions-langs": "^4.23.5", + "@uiw/codemirror-themes-all": "^4.23.5", + "@uiw/react-codemirror": "^4.23.5", + "axios": "^1.7.7", + "class-variance-authority": "^0.7.0", + "clsx": "^2.1.1", + "cmdk": "^1.0.0", + "env-cmd": "^10.1.0", + "lucide-react": "^0.441.0", + "mobx": "^6.13.2", + "mobx-react": "^9.1.1", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-hook-form": "^7.53.0", + "react-katex": "^3.0.1", + "react-markdown": "^9.0.1", + "react-router-dom": "^6.26.2", + "react-syntax-highlighter": "^15.6.1", + "rehype-katex": "^7.0.1", + "remark-gfm": "^4.0.0", + "remark-math": "^6.0.0", + "socket.io-client": "^4.8.0", + "tailwind-merge": "^2.5.2", + "tailwind-scrollbar": "^3.1.0", + "tailwindcss-animate": "^1.0.7", + "ws": "^8.18.0", + "y-codemirror.next": "^0.3.5", + "y-websocket": "^2.0.4", + "yjs": "^13.6.20", + "zod": "^3.23.8" + }, + "devDependencies": { + "@eslint/js": "^9.9.0", + "@tailwindcss/typography": "^0.5.15", + "@tanstack/eslint-plugin-query": "^5.56.1", + "@types/node": "^22.5.5", + "@types/react": "^18.3.3", + "@types/react-dom": "^18.3.0", + "@types/react-syntax-highlighter": "^15.5.13", + "@types/ws": "^8.5.12", + "@vitejs/plugin-react-swc": "^3.5.0", + "autoprefixer": "^10.4.20", + "eslint": "^9.9.0", + "eslint-plugin-react-hooks": "^5.1.0-rc.0", + "eslint-plugin-react-refresh": "^0.4.9", + "eslint-plugin-simple-import-sort": "^12.1.1", + "globals": "^15.9.0", + "postcss": "^8.4.47", + "tailwindcss": "^3.4.11", + "typescript": "^5.5.3", + "typescript-eslint": "^8.0.1", + "vite": "^5.4.10" + } +} diff --git a/frontend/postcss.config.js b/frontend/postcss.config.js new file mode 100644 index 0000000000..2e7af2b7f1 --- /dev/null +++ b/frontend/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +} 
diff --git a/frontend/public/vite.svg b/frontend/public/vite.svg new file mode 100644 index 0000000000..e7b8dfb1b2 --- /dev/null +++ b/frontend/public/vite.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/assets/questions.ts b/frontend/src/assets/questions.ts new file mode 100644 index 0000000000..c6702617a2 --- /dev/null +++ b/frontend/src/assets/questions.ts @@ -0,0 +1,188 @@ +import { Question } from '@/types/question-types'; + +export const questionDetails = [ + { + id: '1', + title: 'Reverse a String', + description: + 'Write a function that reverses a string. The input string is given as an array of characters `s`. You must do this by modifying the input array in-place with O(1) extra memory.\n\n**Example 1:**\n\nInput: `s = ["h","e","l","l","o"]`\n\nOutput: `["o","l","l","e","h"]`\n\n**Example 2:**\n\nInput: `s = ["H","a","n","n","a","h"]`\n\nOutput: `["h","a","n","n","a","H"]`\n\n**Constraints:**\n\n* `1 <= s.length <= 105`\n\n* `s[i]` is a printable ASCII character.', + topic: ['Strings', 'Algorithms'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/reverse-string/', + }, + { + id: '2', + title: 'Linked List Cycle Detection', + description: 'Implement a function to detect if a linked list contains a cycle.', + topic: ['Data Structures', 'Algorithms'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/linked-list-cycle/', + }, + { + id: '3', + title: 'Roman to Integer', + description: 'Given a Roman numeral, convert it to an integer.', + topic: ['Algorithms'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/roman-to-integer/', + }, + { + id: '4', + title: 'Add Binary', + description: 'Given two binary strings `a` and `b`, return their sum as a binary string.', + topic: ['Bit Manipulation', 'Algorithms'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/add-binary/', + }, + { + id: '5', + title: 'Fibonacci Number', + description: + 'The Fibonacci numbers, commonly 
denoted `F(n)`, form a sequence such that each number is the sum of the two preceding ones, starting from 0 and 1. That is:\n\n* `F(0) = 0`, `F(1) = 1`\n\n* `F(n) = F(n - 1) + F(n - 2)`, for `n > 1`\n\nGiven `n`, calculate `F(n)`.', + topic: ['Recursion', 'Algorithms'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/fibonacci-number/', + }, + { + id: '6', + title: 'Implement Stack using Queues', + description: + 'Implement a last-in-first-out (LIFO) stack using only two queues. The implemented stack should support all the functions of a normal stack (push, top, pop, and empty).', + topic: ['Data Structures'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/implement-stack-using-queues/', + }, + { + id: '7', + title: 'Combine Two Tables', + description: + 'Given table `Person` with columns `personId`, `lastName`, and `firstName`, and table `Address` with columns `addressId`, `personId`, `city`, and `state`, write a solution to report the first name, last name, city, and state of each person in the `Person` table. If the address of a `personId` is not present in the `Address` table, report `null` instead.', + topic: ['Databases'], + difficulty: 'Easy', + leetcode: 'https://leetcode.com/problems/combine-two-tables/', + }, + { + id: '8', + title: 'Repeated DNA Sequences', + description: + 'Given a string `s` that represents a DNA sequence, return all the 10-letter-long sequences (substrings) that occur more than once in a DNA molecule. You may return the answer in any order.', + topic: ['Algorithms', 'Bit Manipulation'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/repeated-dna-sequences/', + }, + { + id: '9', + title: 'Course Schedule', + description: + 'There are a total of `numCourses` courses you have to take, labeled from 0 to `numCourses - 1`. You are given an array `prerequisites` where `prerequisites[i] = [ai, bi]` indicates that you must take course `bi` first if you want to take course `ai`. 
Return true if you can finish all courses. Otherwise, return false.', + topic: ['Data Structures', 'Algorithms'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/course-schedule/', + }, + { + id: '10', + title: 'LRU Cache Design', + description: 'Design and implement an LRU (Least Recently Used) cache.', + topic: ['Data Structures'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/lru-cache/', + }, + { + id: '11', + title: 'Longest Common Subsequence', + description: + 'Given two strings `text1` and `text2`, return the length of their longest common subsequence. If there is no common subsequence, return 0.\n\nA subsequence of a string is a new string generated from the original string with some characters (can be none) deleted without changing the relative order of the remaining characters.\n\nFor example, "ace" is a subsequence of "abcde". A common subsequence of two strings is a subsequence that is common to both strings.', + topic: ['Strings', 'Algorithms'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/longest-common-subsequence/', + }, + { + id: '12', + title: 'Rotate Image', + description: + 'You are given an `n x n` 2D matrix representing an image, rotate the image by 90 degrees (clockwise).', + topic: ['Arrays', 'Algorithms'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/rotate-image/', + }, + { + id: '13', + title: 'Airplane Seat Assignment Probability', + description: + 'n passengers board an airplane with exactly n seats. The first passenger has lost the ticket and picks a seat randomly. 
After that, the rest of the passengers will:\n\n- Take their own seat if it is still available\n- Pick other seats randomly when they find their seat occupied\n\nReturn the probability that the nth person gets their own seat.', + topic: ['Brainteaser'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/airplane-seat-assignment-probability/', + }, + { + id: '14', + title: 'Validate Binary Search Tree', + description: + 'Given the root of a binary tree, determine if it is a valid binary search tree (BST).', + topic: ['Data Structures', 'Algorithms'], + difficulty: 'Medium', + leetcode: 'https://leetcode.com/problems/validate-binary-search-tree/', + }, + { + id: '15', + title: 'Sliding Window Maximum', + description: + 'You are given an array of integers `nums`. There is a sliding window of size `k` which is moving from the very left of the array to the very right. You can only see the `k` numbers in the window. Each time the sliding window moves right by one position.\n\nReturn the max sliding window.', + topic: ['Arrays', 'Algorithms'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/sliding-window-maximum/', + }, + { + id: '16', + title: 'N-Queen Problem', + description: + "The n-queens puzzle is the problem of placing n queens on an `n x n` chessboard such that no two queens attack each other.\n\nGiven an integer `n`, return all distinct solutions to the n-queens puzzle. You may return the answer in any order.\n\nEach solution contains a distinct board configuration of the n-queens' placement, where 'Q' and '.' 
both indicate a queen and an empty space, respectively.", + topic: ['Algorithms'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/n-queens/', + }, + { + id: '17', + title: 'Serialize and Deserialize a Binary Tree', + description: + 'Serialization is the process of converting a data structure or object into a sequence of bits so that it can be stored in a file or memory buffer or transmitted across a network connection link to be reconstructed later in the same or another computer environment.\n\nDesign an algorithm to serialize and deserialize a binary tree. There is no restriction on how your serialization/deserialization algorithm should work. You just need to ensure that a binary tree can be serialized to a string and this string can be deserialized to the original tree structure.', + topic: ['Data Structures', 'Algorithms'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/serialize-and-deserialize-binary-tree/', + }, + { + id: '18', + title: 'Wildcard Matching', + description: + "Given an input string `s` and a pattern `p`, implement wildcard pattern matching with support for '?' and '*' where:\n\n- '?' Matches any single character\n- '*' Matches any sequence of characters (including the empty sequence)\n\nThe matching should cover the entire input string (not partial).", + topic: ['Strings', 'Algorithms'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/wildcard-matching/', + }, + { + id: '19', + title: 'Chalkboard XOR Game', + description: + 'You are given an array of integers `nums` representing the numbers written on a chalkboard. Alice and Bob take turns erasing exactly one number from the chalkboard, with Alice starting first. If erasing a number causes the bitwise XOR of all the elements of the chalkboard to become 0, then that player loses. 
The bitwise XOR of one element is that element itself, and the bitwise XOR of no elements is 0.\n\nAlso, if any player starts their turn with the bitwise XOR of all the elements of the chalkboard equal to 0, then that player wins.\n\nReturn `true` if and only if Alice wins the game, assuming both players play optimally.', + topic: ['Brainteaser'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/chalkboard-xor-game/', + }, + { + id: '20', + title: 'Trips and Users', + description: + "Given table `Trips` with columns `id`, `client_id`, `driver_id`, `city_id`, `status`, and `request_at`, where `id` is the primary key. The table holds all taxi trips. Each trip has a unique `id`, while `client_id` and `driver_id` are foreign keys to the `users_id` in the `Users` table.\n\nStatus is an `ENUM` (category) type of (`'completed'`, `'cancelled_by_driver'`, `'cancelled_by_client'`).\n\nGiven table `Users` with columns `users_id`, `banned`, and `role`, `users_id` is the primary key (column with unique values) for this table. The table holds all users. Each user has a unique `users_id` and `role` is an `ENUM` type of (`'client'`, `'driver'`, `'partner'`). `banned` is an `ENUM` category of type (`'Yes'`, `'No'`). The cancellation rate is computed by dividing the number of canceled (by client or driver) requests with unbanned users by the total number of requests with unbanned users on that day.\n\nWrite a solution to find the cancellation rate of requests with unbanned users (both client and driver must not be banned) each day between `\"2013-10-01\"` and `\"2013-10-03\"`. 
Round the cancellation rate to two decimal points.", + topic: ['Databases'], + difficulty: 'Hard', + leetcode: 'https://leetcode.com/problems/trips-and-users/', + }, +]; + +export const questions: Array = questionDetails.map((question) => ({ + id: parseInt(question.id), + title: question.title, + difficulty: question.difficulty as 'Easy' | 'Medium' | 'Hard', + topic: question.topic, + attempted: false, +})); diff --git a/frontend/src/assets/react.svg b/frontend/src/assets/react.svg new file mode 100644 index 0000000000..6c87de9bb3 --- /dev/null +++ b/frontend/src/assets/react.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/components/blocks/_deprecated-nav.tsx b/frontend/src/components/blocks/_deprecated-nav.tsx new file mode 100644 index 0000000000..5cb8b1e5f6 --- /dev/null +++ b/frontend/src/components/blocks/_deprecated-nav.tsx @@ -0,0 +1,44 @@ +import { HamburgerMenuIcon } from '@radix-ui/react-icons'; + +import { Button } from '@/components/ui/button'; +import { cn } from '@/lib/utils'; + +export const Nav = () => { + return ( + // {/* Nav Bar */} +
+ {/* Main Nav */} +
+ {/* Logo */} + + +
+ {/* Mobile Nav */} + + {/* Right Group */} +
+
+
{/* Command Bar */}
+ +
+
+
+ ); +}; diff --git a/frontend/src/components/blocks/authed/index.ts b/frontend/src/components/blocks/authed/index.ts new file mode 100644 index 0000000000..1524a800c3 --- /dev/null +++ b/frontend/src/components/blocks/authed/index.ts @@ -0,0 +1,3 @@ +export * from './main-layout'; +export * from './with-nav-banner'; +export * from './with-nav-blocker'; diff --git a/frontend/src/components/blocks/authed/main-layout.tsx b/frontend/src/components/blocks/authed/main-layout.tsx new file mode 100644 index 0000000000..a8ecfc4bd5 --- /dev/null +++ b/frontend/src/components/blocks/authed/main-layout.tsx @@ -0,0 +1,17 @@ +import { Outlet } from 'react-router-dom'; + +import { cn } from '@/lib/utils'; + +export const AuthedLayout = () => { + return ( +
+ +
+ ); +}; diff --git a/frontend/src/components/blocks/authed/with-nav-banner.tsx b/frontend/src/components/blocks/authed/with-nav-banner.tsx new file mode 100644 index 0000000000..683905b927 --- /dev/null +++ b/frontend/src/components/blocks/authed/with-nav-banner.tsx @@ -0,0 +1,46 @@ +import { type FC, Fragment, type PropsWithChildren } from 'react'; +import { Link } from 'react-router-dom'; + +import { + Breadcrumb, + BreadcrumbItem, + BreadcrumbLink, + BreadcrumbList, + BreadcrumbSeparator, +} from '@/components/ui/breadcrumb'; +import type { BreadCrumb } from '@/lib/routes'; +import { cn } from '@/lib/utils'; + +type IBreadCrumbBannerProps = { + crumbs: Array; +}; + +export const WithNavBanner: FC> = ({ + children, + crumbs, +}) => { + const isLast = (index: number) => index === crumbs.length - 1; + return ( + <> +
+ + + {crumbs.map(({ path, title }, index) => ( + + + + + {title} + + + + {!isLast(index) && } + + ))} + + +
+ {children} + + ); +}; diff --git a/frontend/src/components/blocks/authed/with-nav-blocker.tsx b/frontend/src/components/blocks/authed/with-nav-blocker.tsx new file mode 100644 index 0000000000..b8e9ba7e28 --- /dev/null +++ b/frontend/src/components/blocks/authed/with-nav-blocker.tsx @@ -0,0 +1,55 @@ +import { VisuallyHidden } from '@radix-ui/react-visually-hidden'; +import { FC, PropsWithChildren } from 'react'; +import { useBlocker } from 'react-router-dom'; + +import { Button } from '@/components/ui/button'; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogTitle, +} from '@/components/ui/dialog'; + +export const WithNavBlocker: FC = ({ children }) => { + const blocker = useBlocker( + ({ currentLocation, nextLocation }) => currentLocation.pathname !== nextLocation.pathname + ); + return ( + <> + {blocker.state === 'blocked' && ( + + + + Are you sure you want to navigate away from this page? + + + + + +
+ + +
+
+ + +
+ )} + {children} + + ); +}; diff --git a/frontend/src/components/blocks/interview/ai-chat.tsx b/frontend/src/components/blocks/interview/ai-chat.tsx new file mode 100644 index 0000000000..bbe5a0638d --- /dev/null +++ b/frontend/src/components/blocks/interview/ai-chat.tsx @@ -0,0 +1,202 @@ +import { type LanguageName } from '@uiw/codemirror-extensions-langs'; +import { MessageSquareIcon } from 'lucide-react'; +import React, { useEffect, useRef, useState } from 'react'; + +import { Button } from '@/components/ui/button'; +import { sendChatMessage } from '@/services/ai-service'; + +import { ChatLayout } from './chat/chat-layout'; +import { ChatMessageType } from './chat/chat-message'; + +const STORAGE_KEY = 'ai_chat_history'; + +interface AIChatProps { + isOpen: boolean; + onClose: () => void; + editorCode?: string; + language?: LanguageName; + questionDetails?: string; +} + +interface StoredChat { + messages: Array; + questionDetails: string; +} + +const prompts = [ + 'Help me understand the code written.', + 'Give me some suggestions to solve the problem.', +]; + +export const AIChat: React.FC = ({ + isOpen, + onClose, + editorCode = '', + language = 'python', + questionDetails = '', +}) => { + const [messages, setMessages] = useState>([]); + const [isLoading, setIsLoading] = useState(false); + const [error, setError] = useState(null); + const streamingTextRef = useRef(''); + const prevQuestionRef = useRef(questionDetails); + + useEffect(() => { + const loadMessages = () => { + const stored = localStorage.getItem(STORAGE_KEY); + + if (stored) { + const { messages: storedMessages, questionDetails: storedQuestion } = JSON.parse( + stored + ) as StoredChat; + + // If question has changed, clear the history + if (storedQuestion !== questionDetails) { + localStorage.removeItem(STORAGE_KEY); + setMessages([]); + } else { + // Convert stored timestamps back to Date objects + const messagesWithDates = storedMessages.map((msg) => ({ + ...msg, + timestamp: new 
Date(msg.timestamp), + })); + setMessages(messagesWithDates); + } + } + }; + + loadMessages(); + prevQuestionRef.current = questionDetails; + }, [questionDetails]); + + const handleClearHistory = () => { + localStorage.removeItem(STORAGE_KEY); + setMessages([]); + }; + + useEffect(() => { + if (messages.length > 0) { + const dataToStore: StoredChat = { + messages, + questionDetails, + }; + localStorage.setItem(STORAGE_KEY, JSON.stringify(dataToStore)); + } + }, [messages, questionDetails]); + + const handleSend = async (userMessage: string): Promise => { + if (!userMessage.trim() || isLoading) return; + + // Reset streaming text reference + streamingTextRef.current = ''; + + // Add user message + const newMessage: ChatMessageType = { + text: userMessage, + isUser: true, + timestamp: new Date(), + }; + + setMessages((prev) => [...prev, newMessage]); + setIsLoading(true); + setError(null); + + try { + const payload = { + messages: [...messages, newMessage].map((v) => ({ + role: v.isUser ? 
'user' : 'assistant', + content: v.text, + })), + editorCode, + language, + questionDetails, + }; + + // Add AI response placeholder + setMessages((prev) => [ + ...prev, + { + text: '', + isUser: false, + timestamp: new Date(), + isStreaming: true, + }, + ]); + + const response = await sendChatMessage(payload, (chunk) => { + // Update streaming text + streamingTextRef.current = chunk; + + // Update the last message with the accumulated text + setMessages((prev) => { + const newMessages = [...prev]; + newMessages[newMessages.length - 1] = { + text: streamingTextRef.current, + isUser: false, + timestamp: new Date(), + isStreaming: true, + }; + return newMessages; + }); + }); + + if (response.success) { + setMessages((prev) => { + const newMessages = [...prev]; + newMessages[newMessages.length - 1] = { + text: newMessages[newMessages.length - 1].text, + isUser: false, + timestamp: new Date(), + isStreaming: false, + }; + return newMessages; + }); + } else { + setError('Failed to get response from AI'); + // Remove the streaming message if there was an error + setMessages((prev) => prev.slice(0, -1)); + } + } catch (err) { + setError( + err instanceof Error ? err.message : 'An error occurred while fetching the response' + ); + // Remove the streaming message if there was an error + setMessages((prev) => prev.slice(0, -1)); + } finally { + setIsLoading(false); + streamingTextRef.current = ''; + } + }; + + return ( + ( +
+ +

No messages yet. Start a conversation, or use one of these prompts:

+
+ {prompts.map((value, index) => ( + + ))} +
+
+ )} + /> + ); +}; diff --git a/frontend/src/components/blocks/interview/chat/chat-layout.tsx b/frontend/src/components/blocks/interview/chat/chat-layout.tsx new file mode 100644 index 0000000000..13f29de57e --- /dev/null +++ b/frontend/src/components/blocks/interview/chat/chat-layout.tsx @@ -0,0 +1,191 @@ +import { Loader2, MessageSquare, Send, Trash2, X } from 'lucide-react'; +import React, { ChangeEvent, KeyboardEvent, useEffect, useRef, useState } from 'react'; + +import { Alert, AlertDescription } from '@/components/ui/alert'; +import { + AlertDialog, + AlertDialogAction, + AlertDialogCancel, + AlertDialogContent, + AlertDialogDescription, + AlertDialogFooter, + AlertDialogHeader, + AlertDialogTitle, + AlertDialogTrigger, +} from '@/components/ui/alert-dialog'; +import { Button } from '@/components/ui/button'; +import { ScrollArea } from '@/components/ui/scroll-area'; +import { Textarea } from '@/components/ui/textarea'; + +import { ChatMessage, ChatMessageType } from './chat-message'; + +type CustomElemProps = { + onSend: (message: string) => void; +}; + +interface ChatLayoutProps { + isOpen: boolean; + onClose: () => void; + messages: Array; + onSend: (message: string) => void; + isLoading: boolean; + error: string | null; + title: string; + onClearHistory?: () => void; + CustomPlaceHolderElem?: React.FC; +} + +export const ChatLayout = ({ + isOpen, + onClose, + messages, + onSend, + isLoading, + error, + title, + onClearHistory, + CustomPlaceHolderElem, +}: ChatLayoutProps) => { + const [input, setInput] = useState(''); + const inputRef = useRef(null); + const messagesEndRef = useRef(null); + + const scrollToBottom = () => { + messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }); + }; + + useEffect(() => { + if (isOpen) { + inputRef.current?.focus(); + scrollToBottom(); + } + }, [isOpen]); + + useEffect(() => { + scrollToBottom(); + }, [messages, isLoading]); + + useEffect(() => { + const textAreaEl = inputRef.current; + + if (textAreaEl) { + 
textAreaEl.style.height = 'auto'; + textAreaEl.style.height = `${Math.min(textAreaEl.scrollHeight, 100)}px`; + } + }, [input]); + + const handleSend = () => { + if (input.trim()) { + onSend(input.trim()); + setInput(''); + } + }; + + const handleKeyPress = (e: KeyboardEvent) => { + if (e.key === 'Enter' && !e.shiftKey) { + e.preventDefault(); + handleSend(); + } + }; + + return ( +
+
+
+

{title}

+
+
+ {onClearHistory && ( + + + + + + + Clear Chat History + + Are you sure you want to clear the chat history? This action cannot be undone. + + + + Cancel + Clear History + + + + )} + +
+
+ + + {messages.length === 0 && ( +
+ {CustomPlaceHolderElem ? ( + + ) : ( + <> + +

No messages yet. Start a conversation!

+ + )} +
+ )} + {messages.map((msg, index) => ( + + ))} + {isLoading && ( +
+ +
+ )} + {error && ( + + {error} + + )} +
+ + +
+
+